For #913, source supports complex errors

pull/1080/head
winlin 7 years ago
parent abcaba33ee
commit 9802dc326e
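
This commit migrates the source, DVR, edge and forwarder paths from plain int return codes (ERROR_SUCCESS plus srs_error(...) logging at the failure site) to the complex error type srs_error_t. A minimal sketch of the pattern, using the error API exactly as it appears in the hunks below (srs_error_new, srs_error_wrap, srs_error_code, srs_error_desc, srs_freep); the example_* names and the SrsFileWriter parameter are illustrative, not part of this change:

// Before: int codes, logging at the point of failure.
int example_open_legacy(SrsFileWriter* fs, std::string path)
{
    int ret = ERROR_SUCCESS;
    if ((ret = fs->open(path)) != ERROR_SUCCESS) {
        srs_error("open file %s failed. ret=%d", path.c_str(), ret);
        return ret;
    }
    return ret;
}

// After: srs_error_t carries the code plus a message chain. Legacy int APIs
// are bridged with srs_error_new(code, fmt, ...), and each caller adds its
// own context with srs_error_wrap(err, fmt, ...) instead of logging.
srs_error_t example_open(SrsFileWriter* fs, std::string path)
{
    srs_error_t err = srs_success;
    int ret = ERROR_SUCCESS;
    if ((ret = fs->open(path)) != ERROR_SUCCESS) {
        return srs_error_new(ret, "open file %s", path.c_str());
    }
    return err;
}

srs_error_t example_caller(SrsFileWriter* fs)
{
    srs_error_t err = srs_success;
    if ((err = example_open(fs, "live.flv")) != srs_success) {
        return srs_error_wrap(err, "open segment");
    }
    return err;
}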

@ -112,11 +112,12 @@ srs_error_t SrsConnection::cycle()
// TODO: FIXME: Only reset the error when client closed it.
if (srs_is_client_gracefully_close(srs_error_code(err))) {
srs_warn("client disconnect peer. ret=%d", srs_error_code(err));
srs_freep(err);
return srs_success;
} else {
srs_error("connect error %s", srs_error_desc(err).c_str());
}
return srs_error_wrap(err, "cycle");
srs_freep(err);
return srs_success;
}
int SrsConnection::srs_id()

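The SrsConnection::cycle() hunk above interleaves removed and added lines; the shape of the new handling, as a sketch (example_do_cycle() is a hypothetical stand-in for the real worker):

srs_error_t example_do_cycle();  // hypothetical worker returning a complex error

srs_error_t example_cycle()
{
    srs_error_t err = srs_success;
    if ((err = example_do_cycle()) == srs_success) {
        return err;
    }
    // TODO: FIXME: Only reset the error when client closed it.
    if (srs_is_client_gracefully_close(srs_error_code(err))) {
        // A graceful close by the client is expected; only warn.
        srs_warn("client disconnect peer. ret=%d", srs_error_code(err));
    } else {
        srs_error("connect error %s", srs_error_desc(err).c_str());
    }
    // The error is handled here, so free it and report success to the caller.
    srs_freep(err);
    return srs_success;
}
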
@ -81,30 +81,25 @@ SrsFragment* SrsDvrSegmenter::current()
return fragment;
}
int SrsDvrSegmenter::open()
srs_error_t SrsDvrSegmenter::open()
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
// ignore when already open.
if (fs->is_open()) {
return ret;
return err;
}
string path = generate_path();
if (srs_path_exists(path)) {
ret = ERROR_DVR_CANNOT_APPEND;
srs_error("DVR can't append to exists path=%s. ret=%d", path.c_str(), ret);
return ret;
return srs_error_new(ERROR_DVR_CANNOT_APPEND, "DVR can't append to exists path=%s", path.c_str());
}
fragment->set_path(path);
// create dir first.
if ((err = fragment->create_dir()) != srs_success) {
// TODO: FIXME: Use error
ret = srs_error_code(err);
srs_freep(err);
return ret;
return srs_error_wrap(err, "create dir");
}
// create jitter.
@ -114,106 +109,95 @@ int SrsDvrSegmenter::open()
// open file writer, in append or create mode.
string tmp_dvr_file = fragment->tmppath();
if ((ret = fs->open(tmp_dvr_file)) != ERROR_SUCCESS) {
srs_error("open file stream for file %s failed. ret=%d", path.c_str(), ret);
return ret;
return srs_error_new(ret, "open file %s", path.c_str());
}
// initialize the encoder.
if ((ret = open_encoder()) != ERROR_SUCCESS) {
srs_error("initialize enc by fs for file %s failed. ret=%d", path.c_str(), ret);
return ret;
if ((err = open_encoder()) != srs_success) {
return srs_error_wrap(err, "open encoder");
}
srs_trace("dvr stream %s to file %s", req->stream.c_str(), path.c_str());
return ret;
return err;
}
int SrsDvrSegmenter::write_metadata(SrsSharedPtrMessage* metadata)
srs_error_t SrsDvrSegmenter::write_metadata(SrsSharedPtrMessage* metadata)
{
return encode_metadata(metadata);
}
int SrsDvrSegmenter::write_audio(SrsSharedPtrMessage* shared_audio, SrsFormat* format)
srs_error_t SrsDvrSegmenter::write_audio(SrsSharedPtrMessage* shared_audio, SrsFormat* format)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
SrsSharedPtrMessage* audio = shared_audio->copy();
SrsAutoFree(SrsSharedPtrMessage, audio);
if ((jitter->correct(audio, jitter_algorithm)) != ERROR_SUCCESS) {
return ret;
if ((err = jitter->correct(audio, jitter_algorithm)) != srs_success) {
return srs_error_wrap(err, "jitter");
}
if ((ret = on_update_duration(audio)) != ERROR_SUCCESS) {
return ret;
if ((err = on_update_duration(audio)) != srs_success) {
return srs_error_wrap(err, "update duration");
}
if ((ret = encode_audio(audio, format)) != ERROR_SUCCESS) {
return ret;
if ((err = encode_audio(audio, format)) != srs_success) {
return srs_error_wrap(err, "encode audio");
}
return ret;
return err;
}
int SrsDvrSegmenter::write_video(SrsSharedPtrMessage* shared_video, SrsFormat* format)
srs_error_t SrsDvrSegmenter::write_video(SrsSharedPtrMessage* shared_video, SrsFormat* format)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
SrsSharedPtrMessage* video = shared_video->copy();
SrsAutoFree(SrsSharedPtrMessage, video);
if ((jitter->correct(video, jitter_algorithm)) != ERROR_SUCCESS) {
return ret;
if ((err = jitter->correct(video, jitter_algorithm)) != srs_success) {
return srs_error_wrap(err, "jitter");
}
if ((ret = encode_video(video, format)) != ERROR_SUCCESS) {
return ret;
if ((err = encode_video(video, format)) != srs_success) {
return srs_error_wrap(err, "encode video");
}
if ((ret = on_update_duration(video)) != ERROR_SUCCESS) {
return ret;
if ((err = on_update_duration(video)) != srs_success) {
return srs_error_wrap(err, "update duration");
}
return ret;
return err;
}
int SrsDvrSegmenter::close()
srs_error_t SrsDvrSegmenter::close()
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
// ignore when already closed.
if (!fs->is_open()) {
return ret;
return err;
}
// Close the encoder, then close the fs object.
if ((ret = close_encoder()) != ERROR_SUCCESS) {
return ret;
if ((err = close_encoder()) != srs_success) {
return srs_error_wrap(err, "close encoder");
}
fs->close();
// when tmp flv file exists, reap it.
if ((err = fragment->rename()) != srs_success) {
// TODO: FIXME: Use error
ret = srs_error_code(err);
srs_freep(err);
return ret;
return srs_error_wrap(err, "rename fragment");
}
// TODO: FIXME: the http callback is async, which will trigger thread switch,
// so the on_video maybe invoked during the http callback, and error.
if ((err = plan->on_reap_segment()) != srs_success) {
// TODO: FIXME: Use error
ret = srs_error_code(err);
srs_freep(err);
srs_error("dvr: notify plan to reap segment failed. ret=%d", ret);
return ret;
return srs_error_wrap(err, "reap segment");
}
return ret;
return err;
}
string SrsDvrSegmenter::generate_path()
@ -235,13 +219,10 @@ string SrsDvrSegmenter::generate_path()
return flv_path;
}
int SrsDvrSegmenter::on_update_duration(SrsSharedPtrMessage* msg)
srs_error_t SrsDvrSegmenter::on_update_duration(SrsSharedPtrMessage* msg)
{
int ret = ERROR_SUCCESS;
fragment->append(msg->timestamp);
return ret;
return srs_success;
}
srs_error_t SrsDvrSegmenter::on_reload_vhost_dvr(std::string vhost)
@ -273,13 +254,14 @@ SrsDvrFlvSegmenter::~SrsDvrFlvSegmenter()
srs_freep(enc);
}
int SrsDvrFlvSegmenter::refresh_metadata()
srs_error_t SrsDvrFlvSegmenter::refresh_metadata()
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
// no duration or filesize specified.
if (!duration_offset || !filesize_offset) {
return ret;
return err;
}
int64_t cur = fs->tellg();
@ -290,7 +272,7 @@ int SrsDvrFlvSegmenter::refresh_metadata()
SrsBuffer stream;
if ((ret = stream.initialize(buf, SrsAmf0Size::number())) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "init stream");
}
// filesize to buf.
@ -299,13 +281,13 @@ int SrsDvrFlvSegmenter::refresh_metadata()
stream.skip(-1 * stream.pos());
if ((ret = size->write(&stream)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "write filesize");
}
// update the filesize.
fs->seek2(filesize_offset);
if ((ret = fs->write(buf, SrsAmf0Size::number(), NULL)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "update filesize");
}
// duration to buf
@ -314,24 +296,25 @@ int SrsDvrFlvSegmenter::refresh_metadata()
stream.skip(-1 * stream.pos());
if ((ret = dur->write(&stream)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "write duration");
}
// update the duration
fs->seek2(duration_offset);
if ((ret = fs->write(buf, SrsAmf0Size::number(), NULL)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "update duration");
}
// reset the offset.
fs->seek2(cur);
return ret;
return err;
}
int SrsDvrFlvSegmenter::open_encoder()
srs_error_t SrsDvrFlvSegmenter::open_encoder()
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
has_keyframe = false;
@ -343,42 +326,42 @@ int SrsDvrFlvSegmenter::open_encoder()
enc = new SrsFlvTransmuxer();
if ((ret = enc->initialize(fs)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "init encoder");
}
// write the flv header to writer.
if ((ret = enc->write_header()) != ERROR_SUCCESS) {
srs_error("write flv header failed. ret=%d", ret);
return ret;
return srs_error_new(ret, "write flv header");
}
return ret;
return err;
}
int SrsDvrFlvSegmenter::encode_metadata(SrsSharedPtrMessage* metadata)
srs_error_t SrsDvrFlvSegmenter::encode_metadata(SrsSharedPtrMessage* metadata)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
// Ignore when metadata already written.
if (duration_offset || filesize_offset) {
return ret;
return err;
}
SrsBuffer stream;
if ((ret = stream.initialize(metadata->payload, metadata->size)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "init stream");
}
SrsAmf0Any* name = SrsAmf0Any::str();
SrsAutoFree(SrsAmf0Any, name);
if ((ret = name->read(&stream)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "read name");
}
SrsAmf0Object* obj = SrsAmf0Any::object();
SrsAutoFree(SrsAmf0Object, obj);
if ((ret = obj->read(&stream)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "read object");
}
// remove duration and filesize.
@ -401,39 +384,41 @@ int SrsDvrFlvSegmenter::encode_metadata(SrsSharedPtrMessage* metadata)
// convert metadata to bytes.
if ((ret = stream.initialize(payload, size)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "init stream");
}
if ((ret = name->write(&stream)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "write name");
}
if ((ret = obj->write(&stream)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "write object");
}
// to flv file.
if ((ret = enc->write_metadata(18, payload, size)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "write metadata");
}
return ret;
return err;
}
int SrsDvrFlvSegmenter::encode_audio(SrsSharedPtrMessage* audio, SrsFormat* format)
srs_error_t SrsDvrFlvSegmenter::encode_audio(SrsSharedPtrMessage* audio, SrsFormat* format)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
char* payload = audio->payload;
int size = audio->size;
if ((ret = enc->write_audio(audio->timestamp, payload, size)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "write audio");
}
return ret;
return err;
}
int SrsDvrFlvSegmenter::encode_video(SrsSharedPtrMessage* video, SrsFormat* format)
srs_error_t SrsDvrFlvSegmenter::encode_video(SrsSharedPtrMessage* video, SrsFormat* format)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
char* payload = video->payload;
int size = video->size;
@ -446,21 +431,18 @@ int SrsDvrFlvSegmenter::encode_video(SrsSharedPtrMessage* video, SrsFormat* form
// accept the sequence header here.
// when got no keyframe, ignore when should wait keyframe.
if (!has_keyframe && !sh) {
if (wait_keyframe) {
srs_info("dvr: ignore when wait keyframe.");
return ret;
}
if (!has_keyframe && !sh && wait_keyframe) {
return err;
}
if ((ret = enc->write_video(video->timestamp, payload, size)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "write video");
}
return ret;
return err;
}
int SrsDvrFlvSegmenter::close_encoder()
srs_error_t SrsDvrFlvSegmenter::close_encoder()
{
return refresh_metadata();
}
@ -475,32 +457,36 @@ SrsDvrMp4Segmenter::~SrsDvrMp4Segmenter()
srs_freep(enc);
}
int SrsDvrMp4Segmenter::refresh_metadata()
srs_error_t SrsDvrMp4Segmenter::refresh_metadata()
{
return ERROR_SUCCESS;
return srs_success;
}
int SrsDvrMp4Segmenter::open_encoder()
srs_error_t SrsDvrMp4Segmenter::open_encoder()
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
srs_freep(enc);
enc = new SrsMp4Encoder();
if ((ret = enc->initialize(fs)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "init encoder");
}
return ret;
return err;
}
int SrsDvrMp4Segmenter::encode_metadata(SrsSharedPtrMessage* /*metadata*/)
srs_error_t SrsDvrMp4Segmenter::encode_metadata(SrsSharedPtrMessage* /*metadata*/)
{
return ERROR_SUCCESS;
return srs_success;
}
int SrsDvrMp4Segmenter::encode_audio(SrsSharedPtrMessage* audio, SrsFormat* format)
srs_error_t SrsDvrMp4Segmenter::encode_audio(SrsSharedPtrMessage* audio, SrsFormat* format)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
SrsAudioCodecId sound_format = format->acodec->id;
SrsAudioSampleRate sound_rate = format->acodec->sound_rate;
SrsAudioSampleBits sound_size = format->acodec->sound_size;
@ -518,11 +504,18 @@ int SrsDvrMp4Segmenter::encode_audio(SrsSharedPtrMessage* audio, SrsFormat* form
uint32_t nb_sample = (uint32_t)format->nb_raw;
uint32_t dts = (uint32_t)audio->timestamp;
return enc->write_sample(SrsMp4HandlerTypeSOUN, 0x00, ct, dts, dts, sample, nb_sample);
if ((ret = enc->write_sample(SrsMp4HandlerTypeSOUN, 0x00, ct, dts, dts, sample, nb_sample)) != ERROR_SUCCESS) {
return srs_error_new(ret, "write sample");
}
return err;
}
int SrsDvrMp4Segmenter::encode_video(SrsSharedPtrMessage* video, SrsFormat* format)
srs_error_t SrsDvrMp4Segmenter::encode_video(SrsSharedPtrMessage* video, SrsFormat* format)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
SrsVideoAvcFrameType frame_type = format->video->frame_type;
SrsVideoCodecId codec_id = format->vcodec->id;
@ -538,18 +531,23 @@ int SrsDvrMp4Segmenter::encode_video(SrsSharedPtrMessage* video, SrsFormat* form
uint8_t* sample = (uint8_t*)format->raw;
uint32_t nb_sample = (uint32_t)format->nb_raw;
return enc->write_sample(SrsMp4HandlerTypeVIDE, frame_type, ct, dts, pts, sample, nb_sample);
if ((ret = enc->write_sample(SrsMp4HandlerTypeVIDE, frame_type, ct, dts, pts, sample, nb_sample)) != ERROR_SUCCESS) {
return srs_error_new(ret, "write sample");
}
return err;
}
int SrsDvrMp4Segmenter::close_encoder()
srs_error_t SrsDvrMp4Segmenter::close_encoder()
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
if ((ret = enc->flush()) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "flush encoder");
}
return ret;
return err;
}
SrsDvrAsyncCallOnDvr::SrsDvrAsyncCallOnDvr(int c, SrsRequest* r, string p)
@ -579,13 +577,9 @@ srs_error_t SrsDvrAsyncCallOnDvr::call()
if (true) {
SrsConfDirective* conf = _srs_config->get_vhost_on_dvr(req->vhost);
if (!conf) {
srs_info("ignore the empty http callback: on_dvr");
return err;
if (conf) {
hooks = conf->args;
}
hooks = conf->args;
}
for (int i = 0; i < (int)hooks.size(); i++) {
@ -639,45 +633,45 @@ srs_error_t SrsDvrPlan::initialize(SrsOriginHub* h, SrsDvrSegmenter* s, SrsReque
return err;
}
int SrsDvrPlan::on_meta_data(SrsSharedPtrMessage* shared_metadata)
srs_error_t SrsDvrPlan::on_meta_data(SrsSharedPtrMessage* shared_metadata)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
if (!dvr_enabled) {
return ret;
return err;
}
return segment->write_metadata(shared_metadata);
}
int SrsDvrPlan::on_audio(SrsSharedPtrMessage* shared_audio, SrsFormat* format)
srs_error_t SrsDvrPlan::on_audio(SrsSharedPtrMessage* shared_audio, SrsFormat* format)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
if (!dvr_enabled) {
return ret;
return err;
}
if ((ret = segment->write_audio(shared_audio, format)) != ERROR_SUCCESS) {
return ret;
if ((err = segment->write_audio(shared_audio, format)) != srs_success) {
return srs_error_wrap(err, "write audio");
}
return ret;
return err;
}
int SrsDvrPlan::on_video(SrsSharedPtrMessage* shared_video, SrsFormat* format)
srs_error_t SrsDvrPlan::on_video(SrsSharedPtrMessage* shared_video, SrsFormat* format)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
if (!dvr_enabled) {
return ret;
return err;
}
if ((ret = segment->write_video(shared_video, format)) != ERROR_SUCCESS) {
return ret;
if ((err = segment->write_video(shared_video, format)) != srs_success) {
return srs_error_wrap(err, "write video");
}
return ret;
return err;
}
srs_error_t SrsDvrPlan::on_reap_segment()
@ -719,30 +713,30 @@ SrsDvrSessionPlan::~SrsDvrSessionPlan()
{
}
int SrsDvrSessionPlan::on_publish()
srs_error_t SrsDvrSessionPlan::on_publish()
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
// support multiple publish.
if (dvr_enabled) {
return ret;
return err;
}
if (!_srs_config->get_dvr_enabled(req->vhost)) {
return ret;
return err;
}
if ((ret = segment->close()) != ERROR_SUCCESS) {
return ret;
if ((err = segment->close()) != srs_success) {
return srs_error_wrap(err, "close segment");
}
if ((ret = segment->open()) != ERROR_SUCCESS) {
return ret;
if ((err = segment->open()) != srs_success) {
return srs_error_wrap(err, "open segment");
}
dvr_enabled = true;
return ret;
return err;
}
void SrsDvrSessionPlan::on_unpublish()
@ -753,9 +747,9 @@ void SrsDvrSessionPlan::on_unpublish()
}
// ignore error.
int ret = segment->close();
if (ret != ERROR_SUCCESS) {
srs_warn("ignore flv close error. ret=%d", ret);
srs_error_t err = segment->close();
if (err != srs_success) {
srs_warn("ignore flv close error %s", srs_error_desc(err).c_str());
}
dvr_enabled = false;
@ -788,111 +782,109 @@ srs_error_t SrsDvrSegmentPlan::initialize(SrsOriginHub* h, SrsDvrSegmenter* s, S
return srs_success;
}
int SrsDvrSegmentPlan::on_publish()
srs_error_t SrsDvrSegmentPlan::on_publish()
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
// support multiple publish.
if (dvr_enabled) {
return ret;
return err;
}
if (!_srs_config->get_dvr_enabled(req->vhost)) {
return ret;
return err;
}
if ((ret = segment->close()) != ERROR_SUCCESS) {
return ret;
if ((err = segment->close()) != srs_success) {
return srs_error_wrap(err, "segment close");
}
if ((ret = segment->open()) != ERROR_SUCCESS) {
return ret;
if ((err = segment->open()) != srs_success) {
return srs_error_wrap(err, "segment open");
}
dvr_enabled = true;
return ret;
return err;
}
void SrsDvrSegmentPlan::on_unpublish()
{
}
int SrsDvrSegmentPlan::on_audio(SrsSharedPtrMessage* shared_audio, SrsFormat* format)
srs_error_t SrsDvrSegmentPlan::on_audio(SrsSharedPtrMessage* shared_audio, SrsFormat* format)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
if ((ret = update_duration(shared_audio)) != ERROR_SUCCESS) {
return ret;
if ((err = update_duration(shared_audio)) != srs_success) {
return srs_error_wrap(err, "update duration");
}
if ((ret = SrsDvrPlan::on_audio(shared_audio, format)) != ERROR_SUCCESS) {
return ret;
if ((err = SrsDvrPlan::on_audio(shared_audio, format)) != srs_success) {
return srs_error_wrap(err, "consume audio");
}
return ret;
return err;
}
int SrsDvrSegmentPlan::on_video(SrsSharedPtrMessage* shared_video, SrsFormat* format)
srs_error_t SrsDvrSegmentPlan::on_video(SrsSharedPtrMessage* shared_video, SrsFormat* format)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
if ((ret = update_duration(shared_video)) != ERROR_SUCCESS) {
return ret;
if ((err = update_duration(shared_video)) != srs_success) {
return srs_error_wrap(err, "update duration");
}
if ((ret = SrsDvrPlan::on_video(shared_video, format)) != ERROR_SUCCESS) {
return ret;
if ((err = SrsDvrPlan::on_video(shared_video, format)) != srs_success) {
return srs_error_wrap(err, "consume video");
}
return ret;
return err;
}
int SrsDvrSegmentPlan::update_duration(SrsSharedPtrMessage* msg)
srs_error_t SrsDvrSegmentPlan::update_duration(SrsSharedPtrMessage* msg)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
srs_assert(segment);
// ignore if duration ok.
SrsFragment* fragment = segment->current();
if (cduration <= 0 || fragment->duration() < cduration) {
return ret;
return err;
}
// when wait keyframe, ignore if no frame arrived.
// @see https://github.com/ossrs/srs/issues/177
if (wait_keyframe) {
if (!msg->is_video()) {
return ret;
return err;
}
char* payload = msg->payload;
int size = msg->size;
bool is_key_frame = SrsFlvVideo::h264(payload, size)
&& SrsFlvVideo::keyframe(payload, size)
&& !SrsFlvVideo::sh(payload, size);
bool is_key_frame = SrsFlvVideo::h264(payload, size) && SrsFlvVideo::keyframe(payload, size) && !SrsFlvVideo::sh(payload, size);
if (!is_key_frame) {
return ret;
return err;
}
}
// reap segment
if ((ret = segment->close()) != ERROR_SUCCESS) {
return ret;
if ((err = segment->close()) != srs_success) {
return srs_error_wrap(err, "segment close");
}
// open new flv file
if ((ret = segment->open()) != ERROR_SUCCESS) {
return ret;
if ((err = segment->open()) != srs_success) {
return srs_error_wrap(err, "segment open");
}
// update sequence header
if ((ret = hub->on_dvr_request_sh()) != ERROR_SUCCESS) {
return ret;
if ((err = hub->on_dvr_request_sh()) != srs_success) {
return srs_error_wrap(err, "request sh");
}
return ret;
return err;
}
srs_error_t SrsDvrSegmentPlan::on_reload_vhost_dvr(string vhost)
@ -959,20 +951,20 @@ srs_error_t SrsDvr::initialize(SrsOriginHub* h, SrsRequest* r)
return err;
}
int SrsDvr::on_publish()
srs_error_t SrsDvr::on_publish()
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
// the dvr for this stream is not actived.
if (!actived) {
return ret;
return err;
}
if ((ret = plan->on_publish()) != ERROR_SUCCESS) {
return ret;
if ((err = plan->on_publish()) != srs_success) {
return srs_error_wrap(err, "publish");
}
return ret;
return err;
}
void SrsDvr::on_unpublish()
@ -980,37 +972,37 @@ void SrsDvr::on_unpublish()
plan->on_unpublish();
}
int SrsDvr::on_meta_data(SrsSharedPtrMessage* metadata)
srs_error_t SrsDvr::on_meta_data(SrsSharedPtrMessage* metadata)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
// the dvr for this stream is not actived.
if (!actived) {
return ret;
return err;
}
if ((ret = plan->on_meta_data(metadata)) != ERROR_SUCCESS) {
return ret;
if ((err = plan->on_meta_data(metadata)) != srs_success) {
return srs_error_wrap(err, "metadata");
}
return ret;
return err;
}
int SrsDvr::on_audio(SrsSharedPtrMessage* shared_audio, SrsFormat* format)
srs_error_t SrsDvr::on_audio(SrsSharedPtrMessage* shared_audio, SrsFormat* format)
{
// the dvr for this stream is not actived.
if (!actived) {
return ERROR_SUCCESS;
return srs_success;
}
return plan->on_audio(shared_audio, format);
}
int SrsDvr::on_video(SrsSharedPtrMessage* shared_video, SrsFormat* format)
srs_error_t SrsDvr::on_video(SrsSharedPtrMessage* shared_video, SrsFormat* format)
{
// the dvr for this stream is not actived.
if (!actived) {
return ERROR_SUCCESS;
return srs_success;
}
return plan->on_video(shared_video, format);
@ -1018,7 +1010,6 @@ int SrsDvr::on_video(SrsSharedPtrMessage* shared_video, SrsFormat* format)
srs_error_t SrsDvr::on_reload_vhost_dvr_apply(string vhost)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
SrsConfDirective* conf = _srs_config->get_dvr_apply(req->vhost);
@ -1035,11 +1026,11 @@ srs_error_t SrsDvr::on_reload_vhost_dvr_apply(string vhost)
return err;
}
if ((ret = on_publish()) != ERROR_SUCCESS) {
return srs_error_new(ret, "on publish");
if ((err = on_publish()) != srs_success) {
return srs_error_wrap(err, "on publish");
}
if ((ret = hub->on_dvr_request_sh()) != ERROR_SUCCESS) {
return srs_error_new(ret, "request sh");
if ((err = hub->on_dvr_request_sh()) != srs_success) {
return srs_error_wrap(err, "request sh");
}
return err;

@ -78,33 +78,33 @@ public:
// Open new segment file.
// @param use_tmp_file Whether to use a tmp file for DVR, and rename it when closing.
// @remark Ignore when file is already open.
virtual int open();
virtual srs_error_t open();
// Write the metadata.
virtual int write_metadata(SrsSharedPtrMessage* metadata);
virtual srs_error_t write_metadata(SrsSharedPtrMessage* metadata);
// Write audio packet.
// @param shared_audio, directly ptr, copy it if need to save it.
virtual int write_audio(SrsSharedPtrMessage* shared_audio, SrsFormat* format);
virtual srs_error_t write_audio(SrsSharedPtrMessage* shared_audio, SrsFormat* format);
// Write video packet.
// @param shared_video, directly ptr, copy it if need to save it.
virtual int write_video(SrsSharedPtrMessage* shared_video, SrsFormat* format);
virtual srs_error_t write_video(SrsSharedPtrMessage* shared_video, SrsFormat* format);
// Refresh the metadata. For example, there is duration in flv metadata,
// when DVR is in append mode, the duration must be updated every few seconds.
// @remark May be ignored by the concrete segmenter.
virtual int refresh_metadata() = 0;
virtual srs_error_t refresh_metadata() = 0;
// Close current segment.
// @remark ignore when already closed.
virtual int close();
virtual srs_error_t close();
protected:
virtual int open_encoder() = 0;
virtual int encode_metadata(SrsSharedPtrMessage* metadata) = 0;
virtual int encode_audio(SrsSharedPtrMessage* audio, SrsFormat* format) = 0;
virtual int encode_video(SrsSharedPtrMessage* video, SrsFormat* format) = 0;
virtual int close_encoder() = 0;
virtual srs_error_t open_encoder() = 0;
virtual srs_error_t encode_metadata(SrsSharedPtrMessage* metadata) = 0;
virtual srs_error_t encode_audio(SrsSharedPtrMessage* audio, SrsFormat* format) = 0;
virtual srs_error_t encode_video(SrsSharedPtrMessage* video, SrsFormat* format) = 0;
virtual srs_error_t close_encoder() = 0;
private:
// Generate the flv segment path.
virtual std::string generate_path();
// When update the duration of segment by rtmp msg.
virtual int on_update_duration(SrsSharedPtrMessage* msg);
virtual srs_error_t on_update_duration(SrsSharedPtrMessage* msg);
// interface ISrsReloadHandler
public:
virtual srs_error_t on_reload_vhost_dvr(std::string vhost);
@ -131,13 +131,13 @@ public:
SrsDvrFlvSegmenter();
virtual ~SrsDvrFlvSegmenter();
public:
virtual int refresh_metadata();
virtual srs_error_t refresh_metadata();
protected:
virtual int open_encoder();
virtual int encode_metadata(SrsSharedPtrMessage* metadata);
virtual int encode_audio(SrsSharedPtrMessage* audio, SrsFormat* format);
virtual int encode_video(SrsSharedPtrMessage* video, SrsFormat* format);
virtual int close_encoder();
virtual srs_error_t open_encoder();
virtual srs_error_t encode_metadata(SrsSharedPtrMessage* metadata);
virtual srs_error_t encode_audio(SrsSharedPtrMessage* audio, SrsFormat* format);
virtual srs_error_t encode_video(SrsSharedPtrMessage* video, SrsFormat* format);
virtual srs_error_t close_encoder();
};
/**
@ -152,13 +152,13 @@ public:
SrsDvrMp4Segmenter();
virtual ~SrsDvrMp4Segmenter();
public:
virtual int refresh_metadata();
virtual srs_error_t refresh_metadata();
protected:
virtual int open_encoder();
virtual int encode_metadata(SrsSharedPtrMessage* metadata);
virtual int encode_audio(SrsSharedPtrMessage* audio, SrsFormat* format);
virtual int encode_video(SrsSharedPtrMessage* video, SrsFormat* format);
virtual int close_encoder();
virtual srs_error_t open_encoder();
virtual srs_error_t encode_metadata(SrsSharedPtrMessage* metadata);
virtual srs_error_t encode_audio(SrsSharedPtrMessage* audio, SrsFormat* format);
virtual srs_error_t encode_video(SrsSharedPtrMessage* video, SrsFormat* format);
virtual srs_error_t close_encoder();
};
/**
@ -195,12 +195,12 @@ public:
virtual ~SrsDvrPlan();
public:
virtual srs_error_t initialize(SrsOriginHub* h, SrsDvrSegmenter* s, SrsRequest* r);
virtual int on_publish() = 0;
virtual srs_error_t on_publish() = 0;
virtual void on_unpublish() = 0;
virtual int on_meta_data(SrsSharedPtrMessage* shared_metadata);
virtual int on_audio(SrsSharedPtrMessage* shared_audio, SrsFormat* format);
virtual int on_video(SrsSharedPtrMessage* shared_video, SrsFormat* format);
// Internal interface for segmenter.
virtual srs_error_t on_meta_data(SrsSharedPtrMessage* shared_metadata);
virtual srs_error_t on_audio(SrsSharedPtrMessage* shared_audio, SrsFormat* format);
virtual srs_error_t on_video(SrsSharedPtrMessage* shared_video, SrsFormat* format);
// Internal interface for segmenter.
public:
// When segmenter close a segment.
virtual srs_error_t on_reap_segment();
@ -217,7 +217,7 @@ public:
SrsDvrSessionPlan();
virtual ~SrsDvrSessionPlan();
public:
virtual int on_publish();
virtual srs_error_t on_publish();
virtual void on_unpublish();
};
@ -235,12 +235,12 @@ public:
virtual ~SrsDvrSegmentPlan();
public:
virtual srs_error_t initialize(SrsOriginHub* h, SrsDvrSegmenter* s, SrsRequest* r);
virtual int on_publish();
virtual srs_error_t on_publish();
virtual void on_unpublish();
virtual int on_audio(SrsSharedPtrMessage* shared_audio, SrsFormat* format);
virtual int on_video(SrsSharedPtrMessage* shared_video, SrsFormat* format);
virtual srs_error_t on_audio(SrsSharedPtrMessage* shared_audio, SrsFormat* format);
virtual srs_error_t on_video(SrsSharedPtrMessage* shared_video, SrsFormat* format);
private:
virtual int update_duration(SrsSharedPtrMessage* msg);
virtual srs_error_t update_duration(SrsSharedPtrMessage* msg);
// interface ISrsReloadHandler
public:
virtual srs_error_t on_reload_vhost_dvr(std::string vhost);
@ -275,7 +275,7 @@ public:
* when encoder start to publish RTMP stream.
* @param fetch_sequence_header whether fetch sequence from source.
*/
virtual int on_publish();
virtual srs_error_t on_publish();
/**
* the unpublish event,
* when encoder stop(unpublish) to publish RTMP stream.
@ -284,17 +284,17 @@ public:
/**
* get some information from metadata, it's optional.
*/
virtual int on_meta_data(SrsSharedPtrMessage* metadata);
virtual srs_error_t on_meta_data(SrsSharedPtrMessage* metadata);
/**
* mux the audio packets to dvr.
* @param shared_audio, directly ptr, copy it if need to save it.
*/
virtual int on_audio(SrsSharedPtrMessage* shared_audio, SrsFormat* foramt);
virtual srs_error_t on_audio(SrsSharedPtrMessage* shared_audio, SrsFormat* foramt);
/**
* mux the video packets to dvr.
* @param shared_video, directly ptr, copy it if need to save it.
*/
virtual int on_video(SrsSharedPtrMessage* shared_video, SrsFormat* format);
virtual srs_error_t on_video(SrsSharedPtrMessage* shared_video, SrsFormat* format);
// interface ISrsReloadHandler
public:
virtual srs_error_t on_reload_vhost_dvr_apply(std::string vhost);

@ -189,11 +189,10 @@ srs_error_t SrsEdgeIngester::initialize(SrsSource* s, SrsPlayEdge* e, SrsRequest
srs_error_t SrsEdgeIngester::start()
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
if ((ret = source->on_publish()) != ERROR_SUCCESS) {
return srs_error_new(ret, "notify source");
if ((err = source->on_publish()) != srs_success) {
return srs_error_wrap(err, "notify source");
}
srs_freep(trd);
@ -262,8 +261,8 @@ srs_error_t SrsEdgeIngester::do_cycle()
// reset the redirect to empty, for maybe the origin changed.
redirect = "";
if ((ret = source->on_source_id_changed(_srs_context->get_id())) != ERROR_SUCCESS) {
return srs_error_new(ret, "on source id changed");
if ((err = source->on_source_id_changed(_srs_context->get_id())) != srs_success) {
return srs_error_wrap(err, "on source id changed");
}
if ((ret = upstream->connect(req, lb)) != ERROR_SUCCESS) {
@ -274,17 +273,17 @@ srs_error_t SrsEdgeIngester::do_cycle()
return srs_error_new(ret, "notify edge play");
}
ret = ingest();
err = ingest();
// retry for rtmp 302 immediately.
if (ret == ERROR_CONTROL_REDIRECT) {
ret = ERROR_SUCCESS;
if (srs_error_code(err) == ERROR_CONTROL_REDIRECT) {
srs_error_reset(err);
continue;
}
if (srs_is_client_gracefully_close(ret)) {
srs_warn("origin disconnected, retry. ret=%d", ret);
ret = ERROR_SUCCESS;
if (srs_is_client_gracefully_close(err)) {
srs_warn("origin disconnected, retry, error %s", srs_error_desc(err).c_str());
srs_error_reset(err);
}
break;
}
@ -292,9 +291,10 @@ srs_error_t SrsEdgeIngester::do_cycle()
return srs_error_new(ret, "cycle");
}
int SrsEdgeIngester::ingest()
srs_error_t SrsEdgeIngester::ingest()
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
SrsPithyPrint* pprint = SrsPithyPrint::create_edge();
SrsAutoFree(SrsPithyPrint, pprint);
@ -305,10 +305,7 @@ int SrsEdgeIngester::ingest()
while (true) {
srs_error_t err = srs_success;
if ((err = trd->pull()) != srs_success) {
// TODO: FIXME: Use error
ret = srs_error_code(err);
srs_freep(err);
return ret;
return srs_error_wrap(err, "thread quit");
}
pprint->elapse();
@ -321,82 +318,71 @@ int SrsEdgeIngester::ingest()
// read from client.
SrsCommonMessage* msg = NULL;
if ((ret = upstream->recv_message(&msg)) != ERROR_SUCCESS) {
if (!srs_is_client_gracefully_close(ret)) {
srs_error("pull origin server message failed. ret=%d", ret);
}
return ret;
return srs_error_new(ret, "recv message");
}
srs_verbose("edge loop recv message. ret=%d", ret);
srs_assert(msg);
SrsAutoFree(SrsCommonMessage, msg);
if ((ret = process_publish_message(msg)) != ERROR_SUCCESS) {
return ret;
if ((err = process_publish_message(msg)) != srs_success) {
return srs_error_wrap(err, "process message");
}
}
return ret;
return err;
}
int SrsEdgeIngester::process_publish_message(SrsCommonMessage* msg)
srs_error_t SrsEdgeIngester::process_publish_message(SrsCommonMessage* msg)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
// process audio packet
if (msg->header.is_audio()) {
if ((ret = source->on_audio(msg)) != ERROR_SUCCESS) {
srs_error("source process audio message failed. ret=%d", ret);
return ret;
if ((err = source->on_audio(msg)) != srs_success) {
return srs_error_wrap(err, "source consume audio");
}
}
// process video packet
if (msg->header.is_video()) {
if ((ret = source->on_video(msg)) != ERROR_SUCCESS) {
srs_error("source process video message failed. ret=%d", ret);
return ret;
if ((err = source->on_video(msg)) != srs_success) {
return srs_error_wrap(err, "source consume video");
}
}
// process aggregate packet
if (msg->header.is_aggregate()) {
if ((ret = source->on_aggregate(msg)) != ERROR_SUCCESS) {
srs_error("source process aggregate message failed. ret=%d", ret);
return ret;
if ((err = source->on_aggregate(msg)) != srs_success) {
return srs_error_wrap(err, "source consume aggregate");
}
return ret;
return err;
}
// process onMetaData
if (msg->header.is_amf0_data() || msg->header.is_amf3_data()) {
SrsPacket* pkt = NULL;
if ((ret = upstream->decode_message(msg, &pkt)) != ERROR_SUCCESS) {
srs_error("decode onMetaData message failed. ret=%d", ret);
return ret;
return srs_error_new(ret, "decode message");
}
SrsAutoFree(SrsPacket, pkt);
if (dynamic_cast<SrsOnMetaDataPacket*>(pkt)) {
SrsOnMetaDataPacket* metadata = dynamic_cast<SrsOnMetaDataPacket*>(pkt);
if ((ret = source->on_meta_data(msg, metadata)) != ERROR_SUCCESS) {
srs_error("source process onMetaData message failed. ret=%d", ret);
return ret;
if ((err = source->on_meta_data(msg, metadata)) != srs_success) {
return srs_error_wrap(err, "source consume metadata");
}
srs_info("process onMetaData message success.");
return ret;
return err;
}
srs_info("ignore AMF0/AMF3 data message.");
return ret;
return err;
}
// call messages, for example, reject, redirect.
if (msg->header.is_amf0_command() || msg->header.is_amf3_command()) {
SrsPacket* pkt = NULL;
if ((ret = upstream->decode_message(msg, &pkt)) != ERROR_SUCCESS) {
srs_error("decode call message failed. ret=%d", ret);
return ret;
return srs_error_new(ret, "decode message");
}
SrsAutoFree(SrsPacket, pkt);
@ -404,35 +390,33 @@ int SrsEdgeIngester::process_publish_message(SrsCommonMessage* msg)
if (dynamic_cast<SrsCallPacket*>(pkt)) {
SrsCallPacket* call = dynamic_cast<SrsCallPacket*>(pkt);
if (!call->arguments->is_object()) {
return ret;
return err;
}
SrsAmf0Any* prop = NULL;
SrsAmf0Object* evt = call->arguments->to_object();
if ((prop = evt->ensure_property_string("level")) == NULL) {
return ret;
return err;
} else if (prop->to_str() != StatusLevelError) {
return ret;
return err;
}
if ((prop = evt->get_property("ex")) == NULL || !prop->is_object()) {
return ret;
return err;
}
SrsAmf0Object* ex = prop->to_object();
if ((prop = ex->ensure_property_string("redirect")) == NULL) {
return ret;
return err;
}
redirect = prop->to_str();
ret = ERROR_CONTROL_REDIRECT;
srs_info("RTMP 302 redirect to %s, ret=%d", redirect.c_str(), ret);
return ret;
return srs_error_new(ERROR_CONTROL_REDIRECT, "RTMP 302 redirect to %s", redirect.c_str());
}
}
return ret;
return err;
}
SrsEdgeForwarder::SrsEdgeForwarder()
@ -591,8 +575,8 @@ srs_error_t SrsEdgeForwarder::do_cycle()
// forward all messages.
// each msg in msgs.msgs must be free, for the SrsMessageArray never free them.
int count = 0;
if ((ret = queue->dump_packets(msgs.max, msgs.msgs, count)) != ERROR_SUCCESS) {
return srs_error_new(ret, "queue dumps packets");
if ((err = queue->dump_packets(msgs.max, msgs.msgs, count)) != srs_success) {
return srs_error_wrap(err, "queue dumps packets");
}
pprint->elapse();
@ -617,13 +601,13 @@ srs_error_t SrsEdgeForwarder::do_cycle()
return err;
}
int SrsEdgeForwarder::proxy(SrsCommonMessage* msg)
srs_error_t SrsEdgeForwarder::proxy(SrsCommonMessage* msg)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
if ((ret = send_error_code) != ERROR_SUCCESS) {
srs_error("publish edge proxy thread send error, ret=%d", ret);
return ret;
return srs_error_new(ret, "edge forwarder");
}
// the msg is auto free by source,
@ -631,24 +615,21 @@ int SrsEdgeForwarder::proxy(SrsCommonMessage* msg)
if (msg->size <= 0
|| msg->header.is_set_chunk_size()
|| msg->header.is_window_ackledgement_size()
|| msg->header.is_ackledgement()
) {
return ret;
|| msg->header.is_ackledgement()) {
return err;
}
SrsSharedPtrMessage copy;
if ((ret = copy.create(msg)) != ERROR_SUCCESS) {
srs_error("initialize the msg failed. ret=%d", ret);
return ret;
return srs_error_new(ret, "create message");
}
srs_verbose("initialize shared ptr msg success.");
copy.stream_id = sdk->sid();
if ((ret = queue->enqueue(copy.copy())) != ERROR_SUCCESS) {
srs_error("enqueue edge publish msg failed. ret=%d", ret);
if ((err = queue->enqueue(copy.copy())) != srs_success) {
return srs_error_wrap(err, "enqueue message");
}
return ret;
return err;
}
SrsPlayEdge::SrsPlayEdge()
@ -761,17 +742,13 @@ bool SrsPublishEdge::can_publish()
return state != SrsEdgeStatePublish;
}
int SrsPublishEdge::on_client_publish()
srs_error_t SrsPublishEdge::on_client_publish()
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
// error when not init state.
if (state != SrsEdgeStateInit) {
ret = ERROR_RTMP_EDGE_PUBLISH_STATE;
srs_error("invalid state for client to publish stream on edge. "
"state=%d, ret=%d", state, ret);
return ret;
return srs_error_new(ERROR_RTMP_EDGE_PUBLISH_STATE, "invalid state");
}
// @see https://github.com/ossrs/srs/issues/180
@ -786,22 +763,18 @@ int SrsPublishEdge::on_client_publish()
// start to forward stream to origin.
err = forwarder->start();
// TODO: FIXME: Use error
ret = srs_error_code(err);
srs_freep(err);
// @see https://github.com/ossrs/srs/issues/180
// when failed, revert to init
if (ret != ERROR_SUCCESS) {
if (err != srs_success) {
SrsEdgeState pstate = state;
state = SrsEdgeStateInit;
srs_trace("edge revert from %d to state %d (push). ret=%d", pstate, state, ret);
srs_trace("edge revert from %d to state %d (push), error %s", pstate, state, srs_error_desc(err).c_str());
}
return ret;
return err;
}
int SrsPublishEdge::on_proxy_publish(SrsCommonMessage* msg)
srs_error_t SrsPublishEdge::on_proxy_publish(SrsCommonMessage* msg)
{
return forwarder->proxy(msg);
}
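
The redirect handling above now travels as a complex error: process_publish_message() returns srs_error_new(ERROR_CONTROL_REDIRECT, ...) and do_cycle() discards it with srs_error_reset() and reconnects. A condensed sketch of that retry loop (example_ingest() is a hypothetical stand-in; srs_error_reset is assumed to free the error and reset it to srs_success):

srs_error_t example_ingest();  // hypothetical: pulls from origin until an error occurs

srs_error_t example_edge_retry_loop()
{
    srs_error_t err = srs_success;
    while (true) {
        err = example_ingest();
        // RTMP 302: drop the redirect error and reconnect to the new origin immediately.
        if (srs_error_code(err) == ERROR_CONTROL_REDIRECT) {
            srs_error_reset(err);
            continue;
        }
        // A graceful close by the origin is not fatal either: warn, reset and fall through.
        if (srs_is_client_gracefully_close(srs_error_code(err))) {
            srs_warn("origin disconnected, retry, error %s", srs_error_desc(err).c_str());
            srs_error_reset(err);
        }
        break;
    }
    return err;
}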

@ -139,8 +139,8 @@ public:
private:
virtual srs_error_t do_cycle();
private:
virtual int ingest();
virtual int process_publish_message(SrsCommonMessage* msg);
virtual srs_error_t ingest();
virtual srs_error_t process_publish_message(SrsCommonMessage* msg);
};
/**
@ -181,7 +181,7 @@ public:
private:
virtual srs_error_t do_cycle();
public:
virtual int proxy(SrsCommonMessage* msg);
virtual srs_error_t proxy(SrsCommonMessage* msg);
};
/**
@ -239,11 +239,11 @@ public:
/**
* when client publish stream on edge.
*/
virtual int on_client_publish();
virtual srs_error_t on_client_publish();
/**
* proxy publish stream to edge
*/
virtual int on_proxy_publish(SrsCommonMessage* msg);
virtual srs_error_t on_proxy_publish(SrsCommonMessage* msg);
/**
* proxy unpublish stream to edge.
*/

@ -71,9 +71,9 @@ SrsForwarder::~SrsForwarder()
srs_freep(sh_audio);
}
int SrsForwarder::initialize(SrsRequest* r, string ep)
srs_error_t SrsForwarder::initialize(SrsRequest* r, string ep)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
// it's ok to use the request object,
// SrsSource already copy it and never delete it.
@ -82,7 +82,7 @@ int SrsForwarder::initialize(SrsRequest* r, string ep)
// the ep(endpoint) to forward to
ep_forward = ep;
return ret;
return err;
}
void SrsForwarder::set_queue_size(double queue_size)
@ -90,9 +90,8 @@ void SrsForwarder::set_queue_size(double queue_size)
queue->set_queue_size(queue_size);
}
int SrsForwarder::on_publish()
srs_error_t SrsForwarder::on_publish()
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
// discovery the server port and tcUrl from req and ep_forward.
@ -127,26 +126,17 @@ int SrsForwarder::on_publish()
dest_ep += req->vhost;
if (source_ep == dest_ep) {
ret = ERROR_SYSTEM_FORWARD_LOOP;
srs_warn("forward loop detected. src=%s, dest=%s, ret=%d",
source_ep.c_str(), dest_ep.c_str(), ret);
return ret;
return srs_error_new(ERROR_SYSTEM_FORWARD_LOOP, "forward loop detected. src=%s, dest=%s", source_ep.c_str(), dest_ep.c_str());
}
srs_trace("start forward %s to %s, tcUrl=%s, stream=%s",
source_ep.c_str(), dest_ep.c_str(), tcUrl.c_str(),
req->stream.c_str());
srs_trace("start forward %s to %s, tcUrl=%s, stream=%s", source_ep.c_str(), dest_ep.c_str(), tcUrl.c_str(), req->stream.c_str());
srs_freep(trd);
trd = new SrsSTCoroutine("forward", this);
if ((err = trd->start()) != srs_success) {
// TODO: FIXME: Use error
ret = srs_error_code(err);
srs_freep(err);
return ret;
return srs_error_wrap(err, "start thread");
}
return ret;
return err;
}
void SrsForwarder::on_unpublish()
@ -155,43 +145,33 @@ void SrsForwarder::on_unpublish()
sdk->close();
}
int SrsForwarder::on_meta_data(SrsSharedPtrMessage* shared_metadata)
srs_error_t SrsForwarder::on_meta_data(SrsSharedPtrMessage* shared_metadata)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
SrsSharedPtrMessage* metadata = shared_metadata->copy();
// TODO: FIXME: config the jitter of Forwarder.
if ((err = jitter->correct(metadata, SrsRtmpJitterAlgorithmOFF)) != srs_success) {
srs_freep(metadata);
// TODO: FIXME: Use error
ret = srs_error_code(err);
srs_freep(err);
return ret;
return srs_error_wrap(err, "jitter");
}
if ((ret = queue->enqueue(metadata)) != ERROR_SUCCESS) {
return ret;
if ((err = queue->enqueue(metadata)) != srs_success) {
return srs_error_wrap(err, "enqueue metadata");
}
return ret;
return err;
}
int SrsForwarder::on_audio(SrsSharedPtrMessage* shared_audio)
srs_error_t SrsForwarder::on_audio(SrsSharedPtrMessage* shared_audio)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
SrsSharedPtrMessage* msg = shared_audio->copy();
// TODO: FIXME: config the jitter of Forwarder.
if ((err = jitter->correct(msg, SrsRtmpJitterAlgorithmOFF)) != srs_success) {
srs_freep(msg);
// TODO: FIXME: Use error
ret = srs_error_code(err);
srs_freep(err);
return ret;
return srs_error_wrap(err, "jitter");
}
if (SrsFlvAudio::sh(msg->payload, msg->size)) {
@ -199,27 +179,22 @@ int SrsForwarder::on_audio(SrsSharedPtrMessage* shared_audio)
sh_audio = msg->copy();
}
if ((ret = queue->enqueue(msg)) != ERROR_SUCCESS) {
return ret;
if ((err = queue->enqueue(msg)) != srs_success) {
return srs_error_wrap(err, "enqueue audio");
}
return ret;
return err;
}
int SrsForwarder::on_video(SrsSharedPtrMessage* shared_video)
srs_error_t SrsForwarder::on_video(SrsSharedPtrMessage* shared_video)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
SrsSharedPtrMessage* msg = shared_video->copy();
// TODO: FIXME: config the jitter of Forwarder.
if ((err = jitter->correct(msg, SrsRtmpJitterAlgorithmOFF)) != srs_success) {
srs_freep(msg);
// TODO: FIXME: Use error
ret = srs_error_code(err);
srs_freep(err);
return ret;
return srs_error_wrap(err, "jitter");
}
if (SrsFlvVideo::sh(msg->payload, msg->size)) {
@ -227,11 +202,11 @@ int SrsForwarder::on_video(SrsSharedPtrMessage* shared_video)
sh_video = msg->copy();
}
if ((ret = queue->enqueue(msg)) != ERROR_SUCCESS) {
return ret;
if ((err = queue->enqueue(msg)) != srs_success) {
return srs_error_wrap(err, "enqueue video");
}
return ret;
return err;
}
// when error, forwarder sleep for a while and retry.
@ -287,21 +262,22 @@ srs_error_t SrsForwarder::do_cycle()
return srs_error_new(ret, "sdk publish");
}
if ((ret = hub->on_forwarder_start(this)) != ERROR_SUCCESS) {
return srs_error_new(ret, "notify hub start");
if ((err = hub->on_forwarder_start(this)) != srs_success) {
return srs_error_wrap(err, "notify hub start");
}
if ((ret = forward()) != ERROR_SUCCESS) {
return srs_error_new(ret, "forward");
if ((err = forward()) != srs_success) {
return srs_error_wrap(err, "forward");
}
return err;
}
#define SYS_MAX_FORWARD_SEND_MSGS 128
int SrsForwarder::forward()
srs_error_t SrsForwarder::forward()
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
sdk->set_recv_timeout(SRS_CONSTS_RTMP_PULSE_TMMS);
@ -314,24 +290,18 @@ int SrsForwarder::forward()
// TODO: FIXME: maybe need to zero the sequence header timestamp.
if (sh_video) {
if ((ret = sdk->send_and_free_message(sh_video->copy())) != ERROR_SUCCESS) {
srs_error("forwarder send sh_video to server failed. ret=%d", ret);
return ret;
return srs_error_new(ret, "send video sh");
}
}
if (sh_audio) {
if ((ret = sdk->send_and_free_message(sh_audio->copy())) != ERROR_SUCCESS) {
srs_error("forwarder send sh_audio to server failed. ret=%d", ret);
return ret;
return srs_error_new(ret, "send audio sh");
}
}
while (true) {
srs_error_t err = srs_success;
if ((err = trd->pull()) != srs_success) {
// TODO: FIXME: Use error
ret = srs_error_code(err);
srs_freep(err);
return ret;
return srs_error_wrap(err, "thread quit");
}
pprint->elapse();
@ -341,10 +311,8 @@ int SrsForwarder::forward()
SrsCommonMessage* msg = NULL;
ret = sdk->recv_message(&msg);
srs_verbose("play loop recv message. ret=%d", ret);
if (ret != ERROR_SUCCESS && ret != ERROR_SOCKET_TIMEOUT) {
srs_error("recv server control message failed. ret=%d", ret);
return ret;
return srs_error_new(ret, "receive control message");
}
srs_freep(msg);
@ -353,9 +321,8 @@ int SrsForwarder::forward()
// forward all messages.
// each msg in msgs.msgs must be free, for the SrsMessageArray never free them.
int count = 0;
if ((ret = queue->dump_packets(msgs.max, msgs.msgs, count)) != ERROR_SUCCESS) {
srs_error("get message to forward failed. ret=%d", ret);
return ret;
if ((err = queue->dump_packets(msgs.max, msgs.msgs, count)) != srs_success) {
return srs_error_wrap(err, "dump packets");
}
// pithy print
@ -365,18 +332,16 @@ int SrsForwarder::forward()
// ignore when no messages.
if (count <= 0) {
srs_verbose("no packets to forward.");
continue;
}
// sendout messages, all messages are freed by send_and_free_messages().
if ((ret = sdk->send_and_free_messages(msgs.msgs, count)) != ERROR_SUCCESS) {
srs_error("forwarder messages to server failed. ret=%d", ret);
return ret;
return srs_error_new(ret, "send messages");
}
}
return ret;
return err;
}
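
A condensed sketch of the per-packet pattern in on_meta_data/on_audio/on_video above: the forwarder copies the shared message, frees its copy if jitter correction fails, and wraps errors from enqueue(). The parameter types here are inferred from the surrounding code, not spelled out in this diff:

srs_error_t example_forward_packet(SrsRtmpJitter* jitter, SrsMessageQueue* queue, SrsSharedPtrMessage* shared_msg)
{
    srs_error_t err = srs_success;
    SrsSharedPtrMessage* msg = shared_msg->copy();
    // TODO: FIXME: config the jitter of Forwarder.
    if ((err = jitter->correct(msg, SrsRtmpJitterAlgorithmOFF)) != srs_success) {
        srs_freep(msg);  // the copy is still owned here on failure
        return srs_error_wrap(err, "jitter");
    }
    if ((err = queue->enqueue(msg)) != srs_success) {
        return srs_error_wrap(err, "enqueue message");
    }
    return err;
}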

@ -70,33 +70,33 @@ public:
SrsForwarder(SrsOriginHub* h);
virtual ~SrsForwarder();
public:
virtual int initialize(SrsRequest* r, std::string ep);
virtual srs_error_t initialize(SrsRequest* r, std::string ep);
virtual void set_queue_size(double queue_size);
public:
virtual int on_publish();
virtual srs_error_t on_publish();
virtual void on_unpublish();
/**
* forward the audio packet.
* @param shared_metadata, directly ptr, copy it if need to save it.
*/
virtual int on_meta_data(SrsSharedPtrMessage* shared_metadata);
virtual srs_error_t on_meta_data(SrsSharedPtrMessage* shared_metadata);
/**
* forward the audio packet.
* @param shared_audio, directly ptr, copy it if need to save it.
*/
virtual int on_audio(SrsSharedPtrMessage* shared_audio);
virtual srs_error_t on_audio(SrsSharedPtrMessage* shared_audio);
/**
* forward the video packet.
* @param shared_video, directly ptr, copy it if need to save it.
*/
virtual int on_video(SrsSharedPtrMessage* shared_video);
virtual srs_error_t on_video(SrsSharedPtrMessage* shared_video);
// interface ISrsReusableThread2Handler.
public:
virtual srs_error_t cycle();
private:
virtual srs_error_t do_cycle();
private:
virtual int forward();
virtual srs_error_t forward();
};
#endif

@ -99,6 +99,7 @@ srs_error_t SrsBufferCache::start()
int SrsBufferCache::dump_cache(SrsConsumer* consumer, SrsRtmpJitterAlgorithm jitter)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
if (fast_cache <= 0) {
srs_info("http: ignore dump fast cache.");
@ -106,7 +107,10 @@ int SrsBufferCache::dump_cache(SrsConsumer* consumer, SrsRtmpJitterAlgorithm jit
}
// the jitter is get from SrsSource, which means the time_jitter of vhost.
if ((ret = queue->dump_packets(consumer, false, jitter)) != ERROR_SUCCESS) {
if ((err = queue->dump_packets(consumer, false, jitter)) != srs_success) {
// TODO: FIXME: Use error
ret = srs_error_code(err);
srs_freep(err);
return ret;
}
@ -118,7 +122,6 @@ int SrsBufferCache::dump_cache(SrsConsumer* consumer, SrsRtmpJitterAlgorithm jit
srs_error_t SrsBufferCache::cycle()
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
// TODO: FIXME: support reload.
@ -130,8 +133,8 @@ srs_error_t SrsBufferCache::cycle()
// the stream cache will create consumer to cache stream,
// which will trigger to fetch stream from origin for edge.
SrsConsumer* consumer = NULL;
if ((ret = source->create_consumer(NULL, consumer, false, false, true)) != ERROR_SUCCESS) {
return srs_error_new(ret, "create consumer");
if ((err = source->create_consumer(NULL, consumer, false, false, true)) != srs_success) {
return srs_error_wrap(err, "create consumer");
}
SrsAutoFree(SrsConsumer, consumer);
@ -154,8 +157,8 @@ srs_error_t SrsBufferCache::cycle()
// get messages from consumer.
// each msg in msgs.msgs must be free, for the SrsMessageArray never free them.
int count = 0;
if ((ret = consumer->dump_packets(&msgs, count)) != ERROR_SUCCESS) {
return srs_error_new(ret, "consumer dump packets");
if ((err = consumer->dump_packets(&msgs, count)) != srs_success) {
return srs_error_wrap(err, "consumer dump packets");
}
if (count <= 0) {
@ -521,8 +524,8 @@ srs_error_t SrsLiveStream::serve_http(ISrsHttpResponseWriter* w, ISrsHttpMessage
// create consumer of source, ignore gop cache, use the audio gop cache.
SrsConsumer* consumer = NULL;
if ((ret = source->create_consumer(NULL, consumer, true, true, !enc->has_cache())) != ERROR_SUCCESS) {
return srs_error_new(ret, "create consumer");
if ((err = source->create_consumer(NULL, consumer, true, true, !enc->has_cache())) != srs_success) {
return srs_error_wrap(err, "create consumer");
}
SrsAutoFree(SrsConsumer, consumer);
srs_verbose("http: consumer created success.");
@ -579,8 +582,8 @@ srs_error_t SrsLiveStream::serve_http(ISrsHttpResponseWriter* w, ISrsHttpMessage
// get messages from consumer.
// each msg in msgs.msgs must be free, for the SrsMessageArray never free them.
int count = 0;
if ((ret = consumer->dump_packets(&msgs, count)) != ERROR_SUCCESS) {
return srs_error_new(ret, "consumer dump packets");
if ((err = consumer->dump_packets(&msgs, count)) != srs_success) {
return srs_error_wrap(err, "consumer dump packets");
}
if (count <= 0) {
@ -977,8 +980,8 @@ srs_error_t SrsHttpStreamServer::hijack(ISrsHttpMessage* request, ISrsHttpHandle
}
SrsSource* s = NULL;
if ((ret = SrsSource::fetch_or_create(r, server, &s)) != ERROR_SUCCESS) {
return srs_error_new(ret, "source create");
if ((err = SrsSource::fetch_or_create(r, server, &s)) != srs_success) {
return srs_error_wrap(err, "source create");
}
srs_assert(s != NULL);

@ -368,6 +368,7 @@ void SrsPublishRecvThread::stop()
int SrsPublishRecvThread::consume(SrsCommonMessage* msg)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
// when cid changed, change it.
if (ncid != cid) {
@ -386,7 +387,10 @@ int SrsPublishRecvThread::consume(SrsCommonMessage* msg)
srs_update_system_time_ms(), msg->header.timestamp, msg->size);
// the rtmp connection will handle this message
ret = _conn->handle_publish_message(_source, msg);
err = _conn->handle_publish_message(_source, msg);
// TODO: FIXME: Use error
ret = srs_error_code(err);
srs_freep(err);
// must always free it,
// the source will copy it if need to use.
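
Call sites that still return int are bridged for now, as in SrsPublishRecvThread::consume() above: the complex error is collapsed back to its code and freed. A minimal sketch (example_handle_publish_message() is a hypothetical stand-in for the migrated srs_error_t API being called):

srs_error_t example_handle_publish_message(SrsCommonMessage* msg);  // hypothetical

int example_consume(SrsCommonMessage* msg)
{
    int ret = ERROR_SUCCESS;
    srs_error_t err = example_handle_publish_message(msg);
    // TODO: FIXME: Use error. Until this caller is migrated too, collapse the
    // complex error back to its int code and free the object.
    ret = srs_error_code(err);
    srs_freep(err);
    return ret;
}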

File diff suppressed because it is too large.

@ -148,24 +148,24 @@ public:
virtual void cleanup();
private:
// when valid and connected to vhost/app, service the client.
virtual int service_cycle();
virtual srs_error_t service_cycle();
// stream(play/publish) service cycle, identify client first.
virtual int stream_service_cycle();
virtual int check_vhost(bool try_default_vhost);
virtual int playing(SrsSource* source);
virtual int do_playing(SrsSource* source, SrsConsumer* consumer, SrsQueueRecvThread* trd);
virtual int publishing(SrsSource* source);
virtual int do_publishing(SrsSource* source, SrsPublishRecvThread* trd);
virtual int acquire_publish(SrsSource* source);
virtual srs_error_t stream_service_cycle();
virtual srs_error_t check_vhost(bool try_default_vhost);
virtual srs_error_t playing(SrsSource* source);
virtual srs_error_t do_playing(SrsSource* source, SrsConsumer* consumer, SrsQueueRecvThread* trd);
virtual srs_error_t publishing(SrsSource* source);
virtual srs_error_t do_publishing(SrsSource* source, SrsPublishRecvThread* trd);
virtual srs_error_t acquire_publish(SrsSource* source);
virtual void release_publish(SrsSource* source);
virtual int handle_publish_message(SrsSource* source, SrsCommonMessage* msg);
virtual int process_publish_message(SrsSource* source, SrsCommonMessage* msg);
virtual int process_play_control_msg(SrsConsumer* consumer, SrsCommonMessage* msg);
virtual srs_error_t handle_publish_message(SrsSource* source, SrsCommonMessage* msg);
virtual srs_error_t process_publish_message(SrsSource* source, SrsCommonMessage* msg);
virtual srs_error_t process_play_control_msg(SrsConsumer* consumer, SrsCommonMessage* msg);
virtual void change_mw_sleep(int sleep_ms);
virtual void set_sock_options();
private:
virtual int check_edge_token_traverse_auth();
virtual int do_token_traverse_auth(SrsRtmpClient* client);
virtual srs_error_t check_edge_token_traverse_auth();
virtual srs_error_t do_token_traverse_auth(SrsRtmpClient* client);
private:
/**
* when the connection disconnect, call this method.

@ -484,6 +484,7 @@ int SrsRtspConn::kickoff_audio_cache(SrsRtpPacket* pkt, int64_t dts)
int SrsRtspConn::write_sequence_header()
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
// use the current dts.
int64_t dts = vjitter->timestamp() / 90;
@ -500,7 +501,10 @@ int SrsRtspConn::write_sequence_header()
SrsFormat* format = new SrsFormat();
SrsAutoFree(SrsFormat, format);
if ((ret = format->on_aac_sequence_header((char*)sh.c_str(), (int)sh.length())) != ERROR_SUCCESS) {
if ((err = format->on_aac_sequence_header((char*)sh.c_str(), (int)sh.length())) != srs_success) {
// TODO: FIXME: Use error
ret = srs_error_code(err);
srs_freep(err);
return ret;
}

@ -1389,15 +1389,16 @@ srs_error_t SrsServer::on_reload_http_stream_updated()
return err;
}
int SrsServer::on_publish(SrsSource* s, SrsRequest* r)
srs_error_t SrsServer::on_publish(SrsSource* s, SrsRequest* r)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
if ((ret = http_server->http_mount(s, r)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "http mount");
}
return ret;
return err;
}
void SrsServer::on_unpublish(SrsSource* s, SrsRequest* r)

@ -382,7 +382,7 @@ public:
virtual srs_error_t on_reload_http_stream_updated();
// interface ISrsSourceHandler
public:
virtual int on_publish(SrsSource* s, SrsRequest* r);
virtual srs_error_t on_publish(SrsSource* s, SrsRequest* r);
virtual void on_unpublish(SrsSource* s, SrsRequest* r);
};

File diff suppressed because it is too large.

@ -98,7 +98,7 @@ public:
/**
* get current client time, the last packet time.
*/
virtual int get_time();
virtual int64_t get_time();
};
#ifdef SRS_PERF_QUEUE_FAST_VECTOR
@ -168,19 +168,19 @@ public:
* @param msg, the msg to enqueue, user never free it whatever the return code.
* @param is_overflow, whether it overflowed and was shrunk. NULL to ignore.
*/
virtual int enqueue(SrsSharedPtrMessage* msg, bool* is_overflow = NULL);
virtual srs_error_t enqueue(SrsSharedPtrMessage* msg, bool* is_overflow = NULL);
/**
* get packets in consumer queue.
* @pmsgs SrsSharedPtrMessage*[], used to store the msgs, user must alloc it.
* @count the count in array, output param.
* @max_count the max count to dequeue, must be positive.
*/
virtual int dump_packets(int max_count, SrsSharedPtrMessage** pmsgs, int& count);
virtual srs_error_t dump_packets(int max_count, SrsSharedPtrMessage** pmsgs, int& count);
/**
* dumps packets to consumer, use specified args.
* @remark the atc/tba/tbv/ag are the same as in SrsConsumer.enqueue().
*/
virtual int dump_packets(SrsConsumer* consumer, bool atc, SrsRtmpJitterAlgorithm ag);
virtual srs_error_t dump_packets(SrsConsumer* consumer, bool atc, SrsRtmpJitterAlgorithm ag);
private:
/**
* remove a gop from the front.
@ -250,21 +250,21 @@ public:
/**
* get current client time, the last packet time.
*/
virtual int get_time();
virtual int64_t get_time();
/**
* enqueue an shared ptr message.
* @param shared_msg, directly ptr, copy it if need to save it.
* @param whether atc, do not use jitter correction if true.
* @param ag the algorithm of time jitter.
*/
virtual int enqueue(SrsSharedPtrMessage* shared_msg, bool atc, SrsRtmpJitterAlgorithm ag);
virtual srs_error_t enqueue(SrsSharedPtrMessage* shared_msg, bool atc, SrsRtmpJitterAlgorithm ag);
/**
* get packets in consumer queue.
* @param msgs the msgs array to dump packets to send.
* @param count the count in array, input and output param.
* @remark user can specify the count to get specified msgs; 0 to get all if possible.
*/
virtual int dump_packets(SrsMessageArray* msgs, int& count);
virtual srs_error_t dump_packets(SrsMessageArray* msgs, int& count);
#ifdef SRS_PERF_QUEUE_COND_WAIT
/**
* wait for incoming messages, at least nb_msgs and within the duration.
@ -276,7 +276,7 @@ public:
/**
* when client send the pause message.
*/
virtual int on_play_client_pause(bool is_pause);
virtual srs_error_t on_play_client_pause(bool is_pause);
// ISrsWakable
public:
/**
@ -341,7 +341,7 @@ public:
* 2. clear gop when got keyframe.
* @param shared_msg, direct ptr, copy it if you need to save it.
*/
virtual int cache(SrsSharedPtrMessage* shared_msg);
virtual srs_error_t cache(SrsSharedPtrMessage* shared_msg);
/**
* clear the gop cache.
*/
@ -349,7 +349,7 @@ public:
/**
* dump the cached gop to consumer.
*/
virtual int dump(SrsConsumer* consumer, bool atc, SrsRtmpJitterAlgorithm jitter_algorithm);
virtual srs_error_t dump(SrsConsumer* consumer, bool atc, SrsRtmpJitterAlgorithm jitter_algorithm);
/**
* used for atc to get the time of gop cache,
* the atc will adjust the sequence header timestamp to gop cache.
@ -381,7 +381,7 @@ public:
/**
* when stream start publish, mount stream.
*/
virtual int on_publish(SrsSource* s, SrsRequest* r) = 0;
virtual srs_error_t on_publish(SrsSource* s, SrsRequest* r) = 0;
/**
* when stream stop publish, unmount stream.
*/
@ -454,22 +454,22 @@ public:
virtual srs_error_t cycle();
public:
// When got a parsed metadata.
virtual int on_meta_data(SrsSharedPtrMessage* shared_metadata, SrsOnMetaDataPacket* packet);
virtual srs_error_t on_meta_data(SrsSharedPtrMessage* shared_metadata, SrsOnMetaDataPacket* packet);
// When got a parsed audio packet.
virtual int on_audio(SrsSharedPtrMessage* shared_audio);
virtual srs_error_t on_audio(SrsSharedPtrMessage* shared_audio);
// When got a parsed video packet.
virtual int on_video(SrsSharedPtrMessage* shared_video, bool is_sequence_header);
virtual srs_error_t on_video(SrsSharedPtrMessage* shared_video, bool is_sequence_header);
public:
// When start publish stream.
virtual int on_publish();
virtual srs_error_t on_publish();
// When stop publish stream.
virtual void on_unpublish();
// Internal callback.
public:
// for the SrsForwarder to callback to request the sequence headers.
virtual int on_forwarder_start(SrsForwarder* forwarder);
virtual srs_error_t on_forwarder_start(SrsForwarder* forwarder);
// for the SrsDvr to callback to request the sequence headers.
virtual int on_dvr_request_sh();
virtual srs_error_t on_dvr_request_sh();
// interface ISrsReloadHandler
public:
virtual srs_error_t on_reload_vhost_forward(std::string vhost);
@ -480,7 +480,7 @@ public:
virtual srs_error_t on_reload_vhost_transcode(std::string vhost);
virtual srs_error_t on_reload_vhost_exec(std::string vhost);
private:
virtual int create_forwarders();
virtual srs_error_t create_forwarders();
virtual void destroy_forwarders();
};
@ -518,14 +518,14 @@ public:
// Dumps cached metadata to consumer.
// @param dm Whether dumps the metadata.
// @param ds Whether dumps the sequence header.
virtual int dumps(SrsConsumer* consumer, bool atc, SrsRtmpJitterAlgorithm ag, bool dm, bool ds);
virtual srs_error_t dumps(SrsConsumer* consumer, bool atc, SrsRtmpJitterAlgorithm ag, bool dm, bool ds);
public:
// Update the cached metadata by packet.
virtual int update_data(SrsMessageHeader* header, SrsOnMetaDataPacket* metadata, bool& updated);
virtual srs_error_t update_data(SrsMessageHeader* header, SrsOnMetaDataPacket* metadata, bool& updated);
// Update the cached audio sequence header.
virtual int update_ash(SrsSharedPtrMessage* msg);
virtual srs_error_t update_ash(SrsSharedPtrMessage* msg);
// Update the cached video sequence header.
virtual int update_vsh(SrsSharedPtrMessage* msg);
virtual srs_error_t update_vsh(SrsSharedPtrMessage* msg);
};
/**
@ -543,7 +543,7 @@ public:
* @param h the event handler for source.
* @param pps the matched source, if success never be NULL.
*/
static int fetch_or_create(SrsRequest* r, ISrsSourceHandler* h, SrsSource** pps);
static srs_error_t fetch_or_create(SrsRequest* r, ISrsSourceHandler* h, SrsSource** pps);
private:
/**
* get the exists source, NULL when not exists.
@ -635,30 +635,30 @@ public:
// for the tools callback
public:
// source id changed.
virtual int on_source_id_changed(int id);
virtual srs_error_t on_source_id_changed(int id);
// get current source id.
virtual int source_id();
virtual int pre_source_id();
// logic data methods
public:
virtual bool can_publish(bool is_edge);
virtual int on_meta_data(SrsCommonMessage* msg, SrsOnMetaDataPacket* metadata);
virtual srs_error_t on_meta_data(SrsCommonMessage* msg, SrsOnMetaDataPacket* metadata);
public:
virtual int on_audio(SrsCommonMessage* audio);
virtual srs_error_t on_audio(SrsCommonMessage* audio);
private:
virtual int on_audio_imp(SrsSharedPtrMessage* audio);
virtual srs_error_t on_audio_imp(SrsSharedPtrMessage* audio);
public:
virtual int on_video(SrsCommonMessage* video);
virtual srs_error_t on_video(SrsCommonMessage* video);
private:
virtual int on_video_imp(SrsSharedPtrMessage* video);
virtual srs_error_t on_video_imp(SrsSharedPtrMessage* video);
public:
virtual int on_aggregate(SrsCommonMessage* msg);
virtual srs_error_t on_aggregate(SrsCommonMessage* msg);
/**
* publish stream event notify.
* @param _req the request from client, the source will deep copy it,
* because the request of client may become invalid when reloading.
*/
virtual int on_publish();
virtual srs_error_t on_publish();
virtual void on_unpublish();
// consumer methods
public:
@ -669,16 +669,16 @@ public:
* @param dm, whether dumps the metadata.
* @param dg, whether dumps the gop cache.
*/
virtual int create_consumer(SrsConnection* conn, SrsConsumer*& consumer, bool ds = true, bool dm = true, bool dg = true);
virtual srs_error_t create_consumer(SrsConnection* conn, SrsConsumer*& consumer, bool ds = true, bool dm = true, bool dg = true);
virtual void on_consumer_destroy(SrsConsumer* consumer);
virtual void set_cache(bool enabled);
virtual SrsRtmpJitterAlgorithm jitter();
// internal
public:
// for edge, when publish edge stream, check the state
virtual int on_edge_start_publish();
virtual srs_error_t on_edge_start_publish();
// for edge, proxy the publish
virtual int on_edge_proxy_publish(SrsCommonMessage* msg);
virtual srs_error_t on_edge_proxy_publish(SrsCommonMessage* msg);
// for edge, proxy stop publish
virtual void on_edge_proxy_unpublish();
public:
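
Taken together, the play-side declarations above (create_consumer, enqueue, dump_packets) now surface srs_error_t, so a play loop can wrap failures instead of logging codes. A hedged sketch of such a loop body; play_cycle(), the array size and the send step are illustrative, and it assumes the SrsMessageArray(count) constructor already used elsewhere in the codebase.

srs_error_t play_cycle(SrsSource* source, SrsConnection* conn)
{
    srs_error_t err = srs_success;
    
    // The source dumps sequence header, metadata and gop cache into the new consumer.
    SrsConsumer* consumer = NULL;
    if ((err = source->create_consumer(conn, consumer)) != srs_success) {
        return srs_error_wrap(err, "create consumer");
    }
    
    SrsMessageArray msgs(128);
    int count = 0;
    if ((err = consumer->dump_packets(&msgs, count)) != srs_success) {
        return srs_error_wrap(err, "dump packets");
    }
    
    // ... send the first count messages to the client, free them, and repeat.
    return err;
}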

@ -420,30 +420,28 @@ SrsFrame::~SrsFrame()
srs_freep(codec);
}
int SrsFrame::initialize(SrsCodecConfig* c)
srs_error_t SrsFrame::initialize(SrsCodecConfig* c)
{
codec = c;
nb_samples = 0;
dts = 0;
cts = 0;
return ERROR_SUCCESS;
return srs_success;
}
int SrsFrame::add_sample(char* bytes, int size)
srs_error_t SrsFrame::add_sample(char* bytes, int size)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
if (nb_samples >= SrsMaxNbSamples) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("Frame samples overflow, max=%d. ret=%d", SrsMaxNbSamples, ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "Frame samples overflow");
}
SrsSample* sample = &samples[nb_samples++];
sample->bytes = bytes;
sample->size = size;
return ret;
return err;
}
SrsAudioFrame::SrsAudioFrame()
@ -472,12 +470,12 @@ SrsVideoFrame::~SrsVideoFrame()
{
}
int SrsVideoFrame::add_sample(char* bytes, int size)
srs_error_t SrsVideoFrame::add_sample(char* bytes, int size)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
if ((ret = SrsFrame::add_sample(bytes, size)) != ERROR_SUCCESS) {
return ret;
if ((err = SrsFrame::add_sample(bytes, size)) != srs_success) {
return srs_error_wrap(err, "add frame");
}
// for video, parse the nalu type, set the IDR flag.
@ -495,7 +493,7 @@ int SrsVideoFrame::add_sample(char* bytes, int size)
first_nalu_type = nal_unit_type;
}
return ret;
return err;
}
SrsVideoCodecConfig* SrsVideoFrame::vcodec()
@ -529,24 +527,23 @@ srs_error_t SrsFormat::initialize()
return srs_success;
}
int SrsFormat::on_audio(int64_t timestamp, char* data, int size)
srs_error_t SrsFormat::on_audio(int64_t timestamp, char* data, int size)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
if (!data || size <= 0) {
srs_trace("no audio present, ignore it.");
return ret;
return err;
}
if ((ret = buffer->initialize(data, size)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "init buffer");
}
// audio decode
if (!buffer->require(1)) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("aac decode sound_format failed. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "aac decode sound_format");
}
// @see: E.4.2 Audio Tags, video_file_format_spec_v10_1.pdf, page 76
@ -554,7 +551,7 @@ int SrsFormat::on_audio(int64_t timestamp, char* data, int size)
SrsAudioCodecId codec = (SrsAudioCodecId)((v >> 4) & 0x0f);
if (codec != SrsAudioCodecIdMP3 && codec != SrsAudioCodecIdAAC) {
return ret;
return err;
}
if (!acodec) {
@ -564,8 +561,8 @@ int SrsFormat::on_audio(int64_t timestamp, char* data, int size)
audio = new SrsAudioFrame();
}
if ((ret = audio->initialize(acodec)) != ERROR_SUCCESS) {
return ret;
if ((err = audio->initialize(acodec)) != srs_success) {
return srs_error_wrap(err, "init audio");
}
// Parse by specified codec.
@ -578,24 +575,23 @@ int SrsFormat::on_audio(int64_t timestamp, char* data, int size)
return audio_aac_demux(buffer, timestamp);
}
int SrsFormat::on_video(int64_t timestamp, char* data, int size)
srs_error_t SrsFormat::on_video(int64_t timestamp, char* data, int size)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
if (!data || size <= 0) {
srs_trace("no video present, ignore it.");
return ret;
return err;
}
if ((ret = buffer->initialize(data, size)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "init buffer");
}
// video decode
if (!buffer->require(1)) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("avc decode frame_type failed. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "decode frame_type");
}
// @see: E.4.3 Video Tags, video_file_format_spec_v10_1.pdf, page 78
@ -604,7 +600,7 @@ int SrsFormat::on_video(int64_t timestamp, char* data, int size)
// TODO: Support other codecs.
if (codec_id != SrsVideoCodecIdAVC) {
return ret;
return err;
}
if (!vcodec) {
@ -614,17 +610,17 @@ int SrsFormat::on_video(int64_t timestamp, char* data, int size)
video = new SrsVideoFrame();
}
if ((ret = video->initialize(vcodec)) != ERROR_SUCCESS) {
return ret;
if ((err = video->initialize(vcodec)) != srs_success) {
return srs_error_wrap(err, "init video");
}
buffer->skip(-1 * buffer->pos());
return video_avc_demux(buffer, timestamp);
}
int SrsFormat::on_aac_sequence_header(char* data, int size)
srs_error_t SrsFormat::on_aac_sequence_header(char* data, int size)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
if (!acodec) {
acodec = new SrsAudioCodecConfig();
@ -633,8 +629,8 @@ int SrsFormat::on_aac_sequence_header(char* data, int size)
audio = new SrsAudioFrame();
}
if ((ret = audio->initialize(acodec)) != ERROR_SUCCESS) {
return ret;
if ((err = audio->initialize(acodec)) != srs_success) {
return srs_error_wrap(err, "init audio");
}
return audio_aac_sequence_header_demux(data, size);
@ -652,9 +648,9 @@ bool SrsFormat::is_avc_sequence_header()
&& video && video->avc_packet_type == SrsVideoAvcFrameTraitSequenceHeader;
}
int SrsFormat::video_avc_demux(SrsBuffer* stream, int64_t timestamp)
srs_error_t SrsFormat::video_avc_demux(SrsBuffer* stream, int64_t timestamp)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
// @see: E.4.3 Video Tags, video_file_format_spec_v10_1.pdf, page 78
int8_t frame_type = stream->read_1bytes();
@ -666,22 +662,18 @@ int SrsFormat::video_avc_demux(SrsBuffer* stream, int64_t timestamp)
// ignore info frame without error,
// @see https://github.com/ossrs/srs/issues/288#issuecomment-69863909
if (video->frame_type == SrsVideoAvcFrameTypeVideoInfoFrame) {
srs_warn("avc igone the info frame, ret=%d", ret);
return ret;
srs_warn("avc ignore the info frame");
return err;
}
// only support h.264/avc
if (codec_id != SrsVideoCodecIdAVC) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("avc only support video h.264/avc codec. actual=%d, ret=%d", codec_id, ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "avc only support video h.264/avc, actual=%d", codec_id);
}
vcodec->id = codec_id;
if (!stream->require(4)) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("avc decode avc_packet_type failed. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "avc decode avc_packet_type");
}
int8_t avc_packet_type = stream->read_1bytes();
int32_t composition_time = stream->read_3bytes();
@ -696,27 +688,22 @@ int SrsFormat::video_avc_demux(SrsBuffer* stream, int64_t timestamp)
nb_raw = stream->size() - stream->pos();
if (avc_packet_type == SrsVideoAvcFrameTraitSequenceHeader) {
if ((ret = avc_demux_sps_pps(stream)) != ERROR_SUCCESS) {
return ret;
if ((err = avc_demux_sps_pps(stream)) != srs_success) {
return srs_error_wrap(err, "demux SPS/PPS");
}
} else if (avc_packet_type == SrsVideoAvcFrameTraitNALU){
if ((ret = video_nalu_demux(stream)) != ERROR_SUCCESS) {
return ret;
if ((err = video_nalu_demux(stream)) != srs_success) {
return srs_error_wrap(err, "demux NALU");
}
} else {
// ignored.
}
srs_info("avc decoded, type=%d, codec=%d, avc=%d, cts=%d, size=%d", frame_type, codec_id, avc_packet_type,
composition_time, stream->size() - stream->pos());
return ret;
return err;
}
int SrsFormat::avc_demux_sps_pps(SrsBuffer* stream)
srs_error_t SrsFormat::avc_demux_sps_pps(SrsBuffer* stream)
{
int ret = ERROR_SUCCESS;
// AVCDecoderConfigurationRecord
// 5.2.4.1.1 Syntax, ISO_IEC_14496-15-AVC-format-2012.pdf, page 16
int avc_extra_size = stream->size() - stream->pos();
@ -726,9 +713,7 @@ int SrsFormat::avc_demux_sps_pps(SrsBuffer* stream)
}
if (!stream->require(6)) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("avc decode sequenc header failed. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "avc decode sequence header");
}
//int8_t configurationVersion = stream->read_1bytes();
stream->read_1bytes();
@ -750,35 +735,25 @@ int SrsFormat::avc_demux_sps_pps(SrsBuffer* stream)
// The value of this field shall be one of 0, 1, or 3 corresponding to a
// length encoded with 1, 2, or 4 bytes, respectively.
if (vcodec->NAL_unit_length == 2) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("sps lengthSizeMinusOne should never be 2. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "sps lengthSizeMinusOne should never be 2");
}
// 1 sps, 7.3.2.1 Sequence parameter set RBSP syntax
// ISO_IEC_14496-10-AVC-2003.pdf, page 45.
if (!stream->require(1)) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("avc decode sequenc header sps failed. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "decode SPS");
}
int8_t numOfSequenceParameterSets = stream->read_1bytes();
numOfSequenceParameterSets &= 0x1f;
if (numOfSequenceParameterSets != 1) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("avc decode sequenc header sps failed. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "decode SPS");
}
if (!stream->require(2)) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("avc decode sequenc header sps size failed. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "decode SPS size");
}
uint16_t sequenceParameterSetLength = stream->read_2bytes();
if (!stream->require(sequenceParameterSetLength)) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("avc decode sequenc header sps data failed. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "decode SPS data");
}
if (sequenceParameterSetLength > 0) {
vcodec->sequenceParameterSetNALUnit.resize(sequenceParameterSetLength);
@ -786,27 +761,19 @@ int SrsFormat::avc_demux_sps_pps(SrsBuffer* stream)
}
// 1 pps
if (!stream->require(1)) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("avc decode sequenc header pps failed. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "decode PPS");
}
int8_t numOfPictureParameterSets = stream->read_1bytes();
numOfPictureParameterSets &= 0x1f;
if (numOfPictureParameterSets != 1) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("avc decode sequenc header pps failed. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "decode PPS");
}
if (!stream->require(2)) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("avc decode sequenc header pps size failed. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "decode PPS size");
}
uint16_t pictureParameterSetLength = stream->read_2bytes();
if (!stream->require(pictureParameterSetLength)) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("avc decode sequenc header pps data failed. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "decode PPS data");
}
if (pictureParameterSetLength > 0) {
vcodec->pictureParameterSetNALUnit.resize(pictureParameterSetLength);
@ -816,45 +783,40 @@ int SrsFormat::avc_demux_sps_pps(SrsBuffer* stream)
return avc_demux_sps();
}
int SrsFormat::avc_demux_sps()
srs_error_t SrsFormat::avc_demux_sps()
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
if (vcodec->sequenceParameterSetNALUnit.empty()) {
return ret;
return err;
}
SrsBuffer stream;
char* sps = &vcodec->sequenceParameterSetNALUnit[0];
int nbsps = (int)vcodec->sequenceParameterSetNALUnit.size();
if ((ret = stream.initialize(sps, nbsps)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "init stream");
}
// for NALU, 7.3.1 NAL unit syntax
// ISO_IEC_14496-10-AVC-2012.pdf, page 61.
if (!stream.require(1)) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("avc decode sps failed. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "decode SPS");
}
int8_t nutv = stream.read_1bytes();
// forbidden_zero_bit shall be equal to 0.
int8_t forbidden_zero_bit = (nutv >> 7) & 0x01;
if (forbidden_zero_bit) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("forbidden_zero_bit shall be equal to 0. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "forbidden_zero_bit shall be equal to 0");
}
// nal_ref_idc not equal to 0 specifies that the content of the NAL unit contains a sequence parameter set or a picture
// parameter set or a slice of a reference picture or a slice data partition of a reference picture.
int8_t nal_ref_idc = (nutv >> 5) & 0x03;
if (!nal_ref_idc) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("for sps, nal_ref_idc shall be not be equal to 0. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "for sps, nal_ref_idc shall not be equal to 0");
}
// 7.4.1 NAL unit semantics
@ -862,9 +824,7 @@ int SrsFormat::avc_demux_sps()
// nal_unit_type specifies the type of RBSP data structure contained in the NAL unit as specified in Table 7-1.
SrsAvcNaluType nal_unit_type = (SrsAvcNaluType)(nutv & 0x1f);
if (nal_unit_type != 7) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("for sps, nal_unit_type shall be equal to 7. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "for sps, nal_unit_type shall be equal to 7");
}
// decode the rbsp from sps.
@ -894,106 +854,95 @@ int SrsFormat::avc_demux_sps()
}
int SrsFormat::avc_demux_sps_rbsp(char* rbsp, int nb_rbsp)
srs_error_t SrsFormat::avc_demux_sps_rbsp(char* rbsp, int nb_rbsp)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
// we do not parse the detail of sps.
// @see https://github.com/ossrs/srs/issues/474
if (!avc_parse_sps) {
return ret;
return err;
}
// reparse the rbsp.
SrsBuffer stream;
if ((ret = stream.initialize(rbsp, nb_rbsp)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "init stream");
}
// for SPS, 7.3.2.1.1 Sequence parameter set data syntax
// ISO_IEC_14496-10-AVC-2012.pdf, page 62.
if (!stream.require(3)) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("sps shall atleast 3bytes. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "sps shall be at least 3 bytes");
}
uint8_t profile_idc = stream.read_1bytes();
if (!profile_idc) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("sps the profile_idc invalid. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "sps the profile_idc invalid");
}
int8_t flags = stream.read_1bytes();
if (flags & 0x03) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("sps the flags invalid. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "sps the flags invalid");
}
uint8_t level_idc = stream.read_1bytes();
if (!level_idc) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("sps the level_idc invalid. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "sps the level_idc invalid");
}
SrsBitBuffer bs;
if ((ret = bs.initialize(&stream)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "init bit buffer");
}
int32_t seq_parameter_set_id = -1;
if ((ret = srs_avc_nalu_read_uev(&bs, seq_parameter_set_id)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "read seq_parameter_set_id");
}
if (seq_parameter_set_id < 0) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("sps the seq_parameter_set_id invalid. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "sps the seq_parameter_set_id invalid");
}
srs_info("sps parse profile=%d, level=%d, sps_id=%d", profile_idc, level_idc, seq_parameter_set_id);
int32_t chroma_format_idc = -1;
if (profile_idc == 100 || profile_idc == 110 || profile_idc == 122 || profile_idc == 244
|| profile_idc == 44 || profile_idc == 83 || profile_idc == 86 || profile_idc == 118
|| profile_idc == 128
) {
|| profile_idc == 128) {
if ((ret = srs_avc_nalu_read_uev(&bs, chroma_format_idc)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "read chroma_format_idc");
}
if (chroma_format_idc == 3) {
int8_t separate_colour_plane_flag = -1;
if ((ret = srs_avc_nalu_read_bit(&bs, separate_colour_plane_flag)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "read separate_colour_plane_flag");
}
}
int32_t bit_depth_luma_minus8 = -1;
if ((ret = srs_avc_nalu_read_uev(&bs, bit_depth_luma_minus8)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "read bit_depth_luma_minus8");
}
int32_t bit_depth_chroma_minus8 = -1;
if ((ret = srs_avc_nalu_read_uev(&bs, bit_depth_chroma_minus8)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "read bit_depth_chroma_minus8");
}
int8_t qpprime_y_zero_transform_bypass_flag = -1;
if ((ret = srs_avc_nalu_read_bit(&bs, qpprime_y_zero_transform_bypass_flag)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "read qpprime_y_zero_transform_bypass_flag");
}
int8_t seq_scaling_matrix_present_flag = -1;
if ((ret = srs_avc_nalu_read_bit(&bs, seq_scaling_matrix_present_flag)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "read seq_scaling_matrix_present_flag");
}
if (seq_scaling_matrix_present_flag) {
int nb_scmpfs = ((chroma_format_idc != 3)? 8:12);
for (int i = 0; i < nb_scmpfs; i++) {
int8_t seq_scaling_matrix_present_flag_i = -1;
if ((ret = srs_avc_nalu_read_bit(&bs, seq_scaling_matrix_present_flag_i)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "read seq_scaling_matrix_present_flag_i");
}
}
}
@ -1001,147 +950,140 @@ int SrsFormat::avc_demux_sps_rbsp(char* rbsp, int nb_rbsp)
int32_t log2_max_frame_num_minus4 = -1;
if ((ret = srs_avc_nalu_read_uev(&bs, log2_max_frame_num_minus4)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "read log2_max_frame_num_minus4");
}
int32_t pic_order_cnt_type = -1;
if ((ret = srs_avc_nalu_read_uev(&bs, pic_order_cnt_type)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "read pic_order_cnt_type");
}
if (pic_order_cnt_type == 0) {
int32_t log2_max_pic_order_cnt_lsb_minus4 = -1;
if ((ret = srs_avc_nalu_read_uev(&bs, log2_max_pic_order_cnt_lsb_minus4)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "read log2_max_pic_order_cnt_lsb_minus4");
}
} else if (pic_order_cnt_type == 1) {
int8_t delta_pic_order_always_zero_flag = -1;
if ((ret = srs_avc_nalu_read_bit(&bs, delta_pic_order_always_zero_flag)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "read delta_pic_order_always_zero_flag");
}
int32_t offset_for_non_ref_pic = -1;
if ((ret = srs_avc_nalu_read_uev(&bs, offset_for_non_ref_pic)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "read offset_for_non_ref_pic");
}
int32_t offset_for_top_to_bottom_field = -1;
if ((ret = srs_avc_nalu_read_uev(&bs, offset_for_top_to_bottom_field)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "read offset_for_top_to_bottom_field");
}
int32_t num_ref_frames_in_pic_order_cnt_cycle = -1;
if ((ret = srs_avc_nalu_read_uev(&bs, num_ref_frames_in_pic_order_cnt_cycle)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "read num_ref_frames_in_pic_order_cnt_cycle");
}
if (num_ref_frames_in_pic_order_cnt_cycle < 0) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("sps the num_ref_frames_in_pic_order_cnt_cycle invalid. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "sps the num_ref_frames_in_pic_order_cnt_cycle invalid");
}
for (int i = 0; i < num_ref_frames_in_pic_order_cnt_cycle; i++) {
int32_t offset_for_ref_frame_i = -1;
if ((ret = srs_avc_nalu_read_uev(&bs, offset_for_ref_frame_i)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "read offset_for_ref_frame_i");
}
}
}
int32_t max_num_ref_frames = -1;
if ((ret = srs_avc_nalu_read_uev(&bs, max_num_ref_frames)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "read max_num_ref_frames");
}
int8_t gaps_in_frame_num_value_allowed_flag = -1;
if ((ret = srs_avc_nalu_read_bit(&bs, gaps_in_frame_num_value_allowed_flag)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "read gaps_in_frame_num_value_allowed_flag");
}
int32_t pic_width_in_mbs_minus1 = -1;
if ((ret = srs_avc_nalu_read_uev(&bs, pic_width_in_mbs_minus1)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "read pic_width_in_mbs_minus1");
}
int32_t pic_height_in_map_units_minus1 = -1;
if ((ret = srs_avc_nalu_read_uev(&bs, pic_height_in_map_units_minus1)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "read pic_height_in_map_units_minus1");
}
vcodec->width = (int)(pic_width_in_mbs_minus1 + 1) * 16;
vcodec->height = (int)(pic_height_in_map_units_minus1 + 1) * 16;
return ret;
return err;
}
int SrsFormat::video_nalu_demux(SrsBuffer* stream)
srs_error_t SrsFormat::video_nalu_demux(SrsBuffer* stream)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
// ensure the sequence header demuxed
if (!vcodec->is_avc_codec_ok()) {
srs_warn("avc ignore type=%d for no sequence header. ret=%d", SrsVideoAvcFrameTraitNALU, ret);
return ret;
srs_warn("avc ignore type=%d for no sequence header", SrsVideoAvcFrameTraitNALU);
return err;
}
// guess for the first time.
if (vcodec->payload_format == SrsAvcPayloadFormatGuess) {
// One or more NALUs (Full frames are required)
// try "AnnexB" from ISO_IEC_14496-10-AVC-2003.pdf, page 211.
if ((ret = avc_demux_annexb_format(stream)) != ERROR_SUCCESS) {
if ((err = avc_demux_annexb_format(stream)) != srs_success) {
// stop try when system error.
if (ret != ERROR_HLS_AVC_TRY_OTHERS) {
srs_error("avc demux for annexb failed. ret=%d", ret);
return ret;
if (srs_error_code(err) != ERROR_HLS_AVC_TRY_OTHERS) {
return srs_error_wrap(err, "avc demux for annexb");
}
srs_freep(err);
// try "ISO Base Media File Format" from ISO_IEC_14496-15-AVC-format-2012.pdf, page 20
if ((ret = avc_demux_ibmf_format(stream)) != ERROR_SUCCESS) {
return ret;
if ((err = avc_demux_ibmf_format(stream)) != srs_success) {
return srs_error_wrap(err, "avc demux ibmf");
} else {
vcodec->payload_format = SrsAvcPayloadFormatIbmf;
srs_info("hls guess avc payload is ibmf format.");
}
} else {
vcodec->payload_format = SrsAvcPayloadFormatAnnexb;
srs_info("hls guess avc payload is annexb format.");
}
} else if (vcodec->payload_format == SrsAvcPayloadFormatIbmf) {
// try "ISO Base Media File Format" from ISO_IEC_14496-15-AVC-format-2012.pdf, page 20
if ((ret = avc_demux_ibmf_format(stream)) != ERROR_SUCCESS) {
return ret;
if ((err = avc_demux_ibmf_format(stream)) != srs_success) {
return srs_error_wrap(err, "avc demux ibmf");
}
srs_info("hls decode avc payload in ibmf format.");
} else {
// One or more NALUs (Full frames are required)
// try "AnnexB" from ISO_IEC_14496-10-AVC-2003.pdf, page 211.
if ((ret = avc_demux_annexb_format(stream)) != ERROR_SUCCESS) {
if ((err = avc_demux_annexb_format(stream)) != srs_success) {
// ok, we guess out the payload is annexb, but maybe changed to ibmf.
if (ret != ERROR_HLS_AVC_TRY_OTHERS) {
srs_error("avc demux for annexb failed. ret=%d", ret);
return ret;
if (srs_error_code(err) != ERROR_HLS_AVC_TRY_OTHERS) {
return srs_error_wrap(err, "avc demux annexb");
}
srs_freep(err);
// try "ISO Base Media File Format" from ISO_IEC_14496-15-AVC-format-2012.pdf, page 20
if ((ret = avc_demux_ibmf_format(stream)) != ERROR_SUCCESS) {
return ret;
if ((err = avc_demux_ibmf_format(stream)) != srs_success) {
return srs_error_wrap(err, "avc demux ibmf");
} else {
vcodec->payload_format = SrsAvcPayloadFormatIbmf;
srs_warn("hls avc payload change from annexb to ibmf format.");
}
}
srs_info("hls decode avc payload in annexb format.");
}
return ret;
return err;
}
int SrsFormat::avc_demux_annexb_format(SrsBuffer* stream)
srs_error_t SrsFormat::avc_demux_annexb_format(SrsBuffer* stream)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
// not annexb, try others
if (!srs_avc_startswith_annexb(stream, NULL)) {
return ERROR_HLS_AVC_TRY_OTHERS;
return srs_error_new(ERROR_HLS_AVC_TRY_OTHERS, "try others");
}
// AnnexB
@ -1151,7 +1093,7 @@ int SrsFormat::avc_demux_annexb_format(SrsBuffer* stream)
// find start code
int nb_start_code = 0;
if (!srs_avc_startswith_annexb(stream, &nb_start_code)) {
return ret;
return err;
}
// skip the start code.
@ -1179,18 +1121,17 @@ int SrsFormat::avc_demux_annexb_format(SrsBuffer* stream)
}
// got the NALU.
if ((ret = video->add_sample(p, (int)(pp - p))) != ERROR_SUCCESS) {
srs_error("annexb add video sample failed. ret=%d", ret);
return ret;
if ((err = video->add_sample(p, (int)(pp - p))) != srs_success) {
return srs_error_wrap(err, "add video frame");
}
}
return ret;
return err;
}
int SrsFormat::avc_demux_ibmf_format(SrsBuffer* stream)
srs_error_t SrsFormat::avc_demux_ibmf_format(SrsBuffer* stream)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
int PictureLength = stream->size() - stream->pos();
@ -1205,9 +1146,7 @@ int SrsFormat::avc_demux_ibmf_format(SrsBuffer* stream)
for (int i = 0; i < PictureLength;) {
// unsigned int((NAL_unit_length+1)*8) NALUnitLength;
if (!stream->require(vcodec->NAL_unit_length + 1)) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("avc decode NALU size failed. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "avc decode NALU size");
}
int32_t NALUnitLength = 0;
if (vcodec->NAL_unit_length == 3) {
@ -1221,33 +1160,28 @@ int SrsFormat::avc_demux_ibmf_format(SrsBuffer* stream)
// maybe stream is invalid format.
// see: https://github.com/ossrs/srs/issues/183
if (NALUnitLength < 0) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("maybe stream is AnnexB format. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "maybe stream is AnnexB format");
}
// NALUnit
if (!stream->require(NALUnitLength)) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("avc decode NALU data failed. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "avc decode NALU data");
}
// 7.3.1 NAL unit syntax, ISO_IEC_14496-10-AVC-2003.pdf, page 44.
if ((ret = video->add_sample(stream->data() + stream->pos(), NALUnitLength)) != ERROR_SUCCESS) {
srs_error("avc add video sample failed. ret=%d", ret);
return ret;
if ((err = video->add_sample(stream->data() + stream->pos(), NALUnitLength)) != srs_success) {
return srs_error_wrap(err, "avc add video frame");
}
stream->skip(NALUnitLength);
i += vcodec->NAL_unit_length + 1 + NALUnitLength;
}
return ret;
return err;
}
int SrsFormat::audio_aac_demux(SrsBuffer* stream, int64_t timestamp)
srs_error_t SrsFormat::audio_aac_demux(SrsBuffer* stream, int64_t timestamp)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
audio->cts = 0;
audio->dts = timestamp;
@ -1269,20 +1203,16 @@ int SrsFormat::audio_aac_demux(SrsBuffer* stream, int64_t timestamp)
// we support h.264+mp3 for hls.
if (codec_id == SrsAudioCodecIdMP3) {
return ERROR_HLS_TRY_MP3;
return srs_error_new(ERROR_HLS_TRY_MP3, "try mp3");
}
// only support aac
if (codec_id != SrsAudioCodecIdAAC) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("aac only support mp3/aac codec. actual=%d, ret=%d", codec_id, ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "not supported codec %d", codec_id);
}
if (!stream->require(1)) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("aac decode aac_packet_type failed. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "aac decode aac_packet_type");
}
SrsAudioAacFrameTrait aac_packet_type = (SrsAudioAacFrameTrait)stream->read_1bytes();
@ -1300,22 +1230,21 @@ int SrsFormat::audio_aac_demux(SrsBuffer* stream, int64_t timestamp)
char *copy_stream_from = stream->data() + stream->pos();
acodec->aac_extra_data = std::vector<char>(copy_stream_from, copy_stream_from + aac_extra_size);
if ((ret = audio_aac_sequence_header_demux(&acodec->aac_extra_data[0], aac_extra_size)) != ERROR_SUCCESS) {
return ret;
if ((err = audio_aac_sequence_header_demux(&acodec->aac_extra_data[0], aac_extra_size)) != srs_success) {
return srs_error_wrap(err, "demux aac sh");
}
}
} else if (aac_packet_type == SrsAudioAacFrameTraitRawData) {
// ensure the sequence header demuxed
if (!acodec->is_aac_codec_ok()) {
srs_warn("aac ignore type=%d for no sequence header. ret=%d", aac_packet_type, ret);
return ret;
srs_warn("aac ignore type=%d for no sequence header", aac_packet_type);
return err;
}
// Raw AAC frame data in UI8 []
// 6.3 Raw Data, ISO_IEC_13818-7-AAC-2004.pdf, page 28
if ((ret = audio->add_sample(stream->data() + stream->pos(), stream->size() - stream->pos())) != ERROR_SUCCESS) {
srs_error("aac add sample failed. ret=%d", ret);
return ret;
if ((err = audio->add_sample(stream->data() + stream->pos(), stream->size() - stream->pos())) != srs_success) {
return srs_error_wrap(err, "add audio frame");
}
} else {
// ignored.
@ -1344,15 +1273,12 @@ int SrsFormat::audio_aac_demux(SrsBuffer* stream, int64_t timestamp)
};
}
srs_info("aac decoded, type=%d, codec=%d, asize=%d, rate=%d, format=%d, size=%d", sound_type, codec_id, sound_size,
sound_rate, sound_format, stream->size() - stream->pos());
return ret;
return err;
}
int SrsFormat::audio_mp3_demux(SrsBuffer* stream, int64_t timestamp)
srs_error_t SrsFormat::audio_mp3_demux(SrsBuffer* stream, int64_t timestamp)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
audio->cts = 0;
audio->dts = timestamp;
@ -1381,30 +1307,27 @@ int SrsFormat::audio_mp3_demux(SrsBuffer* stream, int64_t timestamp)
stream->skip(1);
if (stream->empty()) {
return ret;
return err;
}
char* data = stream->data() + stream->pos();
int size = stream->size() - stream->pos();
// mp3 payload.
if ((ret = audio->add_sample(data, size)) != ERROR_SUCCESS) {
srs_error("audio codec add mp3 sample failed. ret=%d", ret);
return ret;
if ((err = audio->add_sample(data, size)) != srs_success) {
return srs_error_wrap(err, "add audio frame");
}
srs_info("audio decoded, codec=%d, ssize=%d, srate=%d, channels=%d, size=%d",
acodec->id, acodec->sound_size, acodec->sound_rate, acodec->sound_type, size);
return ret;
return err;
}
int SrsFormat::audio_aac_sequence_header_demux(char* data, int size)
srs_error_t SrsFormat::audio_aac_sequence_header_demux(char* data, int size)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
if ((ret = buffer->initialize(data, size)) != ERROR_SUCCESS) {
return ret;
return srs_error_new(ret, "init buffer");
}
// only need to decode the first 2bytes:
@ -1412,9 +1335,7 @@ int SrsFormat::audio_aac_sequence_header_demux(char* data, int size)
// samplingFrequencyIndex, aac_sample_rate, 4bits.
// channelConfiguration, aac_channels, 4bits
if (!buffer->require(2)) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("audio codec decode aac sequence header failed. ret=%d", ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "audio codec decode aac sh");
}
uint8_t profile_ObjectType = buffer->read_1bytes();
uint8_t samplingFrequencyIndex = buffer->read_1bytes();
@ -1429,10 +1350,7 @@ int SrsFormat::audio_aac_sequence_header_demux(char* data, int size)
// convert the object type in sequence header to aac profile of ADTS.
acodec->aac_object = (SrsAacObjectType)profile_ObjectType;
if (acodec->aac_object == SrsAacObjectTypeReserved) {
ret = ERROR_HLS_DECODE_ERROR;
srs_error("audio codec decode aac sequence header failed, "
"adts object=%d invalid. ret=%d", profile_ObjectType, ret);
return ret;
return srs_error_new(ERROR_HLS_DECODE_ERROR, "aac decode sh object %d", profile_ObjectType);
}
// TODO: FIXME: to support aac he/he-v2, see: ngx_rtmp_codec_parse_aac_header
@ -1447,6 +1365,6 @@ int SrsFormat::audio_aac_sequence_header_demux(char* data, int size)
//aac_profile = 1;
//}
return ret;
return err;
}
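
With the whole demux path above converted, a caller of SrsFormat sees a single wrapped error chain instead of a logged code. A hedged sketch of feeding one FLV video tag; demux_flv_video() is illustrative, and it assumes the parsed frame stays reachable through the format's public video member as before.

srs_error_t demux_flv_video(SrsFormat* format, int64_t timestamp, char* data, int size)
{
    srs_error_t err = srs_success;
    
    if ((err = format->on_video(timestamp, data, size)) != srs_success) {
        return srs_error_wrap(err, "format on video");
    }
    
    // On success the samples are in format->video; non-AVC payloads are ignored without error.
    return err;
}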

@ -621,9 +621,9 @@ public:
virtual ~SrsFrame();
public:
// Initialize the frame, to parse samples.
virtual int initialize(SrsCodecConfig* c);
virtual srs_error_t initialize(SrsCodecConfig* c);
// Add a sample to frame.
virtual int add_sample(char* bytes, int size);
virtual srs_error_t add_sample(char* bytes, int size);
};
/**
@ -662,7 +662,7 @@ public:
virtual ~SrsVideoFrame();
public:
// Add the sample without ANNEXB or IBMF header, or RAW AAC or MP3 data.
virtual int add_sample(char* bytes, int size);
virtual srs_error_t add_sample(char* bytes, int size);
public:
virtual SrsVideoCodecConfig* vcodec();
};
@ -696,12 +696,12 @@ public:
virtual srs_error_t initialize();
// When got a parsed audio packet.
// @param data The data in FLV format.
virtual int on_audio(int64_t timestamp, char* data, int size);
virtual srs_error_t on_audio(int64_t timestamp, char* data, int size);
// When got a parsed video packet.
// @param data The data in FLV format.
virtual int on_video(int64_t timestamp, char* data, int size);
virtual srs_error_t on_video(int64_t timestamp, char* data, int size);
// When got a audio aac sequence header.
virtual int on_aac_sequence_header(char* data, int size);
virtual srs_error_t on_aac_sequence_header(char* data, int size);
public:
virtual bool is_aac_sequence_header();
virtual bool is_avc_sequence_header();
@ -710,28 +710,28 @@ private:
// The packet is muxed in FLV format, defined in flv specification.
// Demux the sps/pps from sequence header.
// Demux the samples from NALUs.
virtual int video_avc_demux(SrsBuffer* stream, int64_t timestamp);
virtual srs_error_t video_avc_demux(SrsBuffer* stream, int64_t timestamp);
private:
// Parse the H.264 SPS/PPS.
virtual int avc_demux_sps_pps(SrsBuffer* stream);
virtual int avc_demux_sps();
virtual int avc_demux_sps_rbsp(char* rbsp, int nb_rbsp);
virtual srs_error_t avc_demux_sps_pps(SrsBuffer* stream);
virtual srs_error_t avc_demux_sps();
virtual srs_error_t avc_demux_sps_rbsp(char* rbsp, int nb_rbsp);
private:
// Parse the H.264 NALUs.
virtual int video_nalu_demux(SrsBuffer* stream);
virtual srs_error_t video_nalu_demux(SrsBuffer* stream);
// Demux the avc NALU in "AnnexB" from ISO_IEC_14496-10-AVC-2003.pdf, page 211.
virtual int avc_demux_annexb_format(SrsBuffer* stream);
virtual srs_error_t avc_demux_annexb_format(SrsBuffer* stream);
// Demux the avc NALU in "ISO Base Media File Format" from ISO_IEC_14496-15-AVC-format-2012.pdf, page 20
virtual int avc_demux_ibmf_format(SrsBuffer* stream);
virtual srs_error_t avc_demux_ibmf_format(SrsBuffer* stream);
private:
// Demux the audio packet in AAC codec.
// Demux the asc from sequence header.
// Demux the samples from RAW data.
virtual int audio_aac_demux(SrsBuffer* stream, int64_t timestamp);
virtual int audio_mp3_demux(SrsBuffer* stream, int64_t timestamp);
virtual srs_error_t audio_aac_demux(SrsBuffer* stream, int64_t timestamp);
virtual srs_error_t audio_mp3_demux(SrsBuffer* stream, int64_t timestamp);
public:
// Directly demux the sequence header, without RTMP packet header.
virtual int audio_aac_sequence_header_demux(char* data, int size);
virtual srs_error_t audio_aac_sequence_header_demux(char* data, int size);
};
#endif
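
A hedged sketch against the frame declarations above: initialize() only binds the codec config and add_sample() only references the caller's bytes, so the buffer must outlive the frame. collect_nalu() is an illustrative helper, not part of the patch.

srs_error_t collect_nalu(SrsVideoFrame* frame, SrsVideoCodecConfig* vcodec, char* nalu, int nb_nalu)
{
    srs_error_t err = srs_success;
    
    if ((err = frame->initialize(vcodec)) != srs_success) {
        return srs_error_wrap(err, "init frame");
    }
    
    // The frame keeps a pointer to nalu, it does not copy the bytes.
    if ((err = frame->add_sample(nalu, nb_nalu)) != srs_success) {
        return srs_error_wrap(err, "add sample");
    }
    
    return err;
}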

@ -37,6 +37,12 @@ bool srs_is_system_control_error(int error_code)
|| error_code == ERROR_CONTROL_REDIRECT;
}
bool srs_is_system_control_error(srs_error_t err)
{
int error_code = srs_error_code(err);
return srs_is_system_control_error(error_code);
}
bool srs_is_client_gracefully_close(int error_code)
{
return error_code == ERROR_SOCKET_READ
@ -44,6 +50,12 @@ bool srs_is_client_gracefully_close(int error_code)
|| error_code == ERROR_SOCKET_WRITE;
}
bool srs_is_client_gracefully_close(srs_error_t err)
{
int error_code = srs_error_code(err);
return srs_is_client_gracefully_close(error_code);
}
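
The two overloads added above let callers classify a wrapped error directly, without extracting its code first. A hedged sketch of the intended use at a receive boundary; on_recv_failed() is illustrative.

srs_error_t on_recv_failed(srs_error_t err)
{
    // A peer closing the socket is expected, swallow it.
    if (srs_is_client_gracefully_close(err)) {
        srs_warn("client gracefully close: %s", srs_error_desc(err).c_str());
        srs_freep(err);
        return srs_success;
    }
    
    // Control errors (republish, redirect, ...) are handled by the caller.
    if (srs_is_system_control_error(err)) {
        return err;
    }
    
    return srs_error_wrap(err, "recv");
}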
SrsCplxError::SrsCplxError()
{
code = ERROR_SUCCESS;

@ -325,7 +325,7 @@
// user-defined error.
///////////////////////////////////////////////////////
#define ERROR_USER_START 9000
#define ERROR_USER_DISCONNECT 9001
//#define ERROR_USER_DISCONNECT 9001
#define ERROR_SOURCE_NOT_FOUND 9002
#define ERROR_USER_END 9999
@ -334,7 +334,9 @@
*/
// TODO: FIXME: Remove it from the underlayer, for it is confused with error and logger.
extern bool srs_is_system_control_error(int error_code);
extern bool srs_is_system_control_error(srs_error_t err);
extern bool srs_is_client_gracefully_close(int error_code);
extern bool srs_is_client_gracefully_close(srs_error_t err);
// Use complex errors, @read https://github.com/ossrs/srs/issues/913
class SrsCplxError
@ -374,6 +376,7 @@ public:
#define srs_error_copy(err) SrsCplxError::copy(err)
#define srs_error_desc(err) SrsCplxError::description(err)
#define srs_error_code(err) SrsCplxError::error_code(err)
#define srs_error_reset(err) srs_freep(err); err = srs_success
#endif
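
The new srs_error_reset(err) macro above frees the current error object and resets the variable to srs_success, which keeps retry loops from leaking errors between attempts. A minimal sketch; connect_with_retry() and do_connect() are illustrative.

srs_error_t connect_with_retry(int max_tries)
{
    srs_error_t err = srs_success;
    
    for (int i = 0; i < max_tries; i++) {
        if ((err = do_connect()) == srs_success) {
            return srs_success;
        }
        
        if (i == max_tries - 1) {
            return srs_error_wrap(err, "connect retries exhausted");
        }
        
        srs_warn("connect #%d failed: %s, retry", i, srs_error_desc(err).c_str());
        srs_error_reset(err);
    }
    
    return err;
}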

@ -2957,11 +2957,10 @@ srs_error_t SrsTsTransmuxer::initialize(SrsFileWriter* fw)
srs_error_t SrsTsTransmuxer::write_audio(int64_t timestamp, char* data, int size)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
if ((ret = format->on_audio(timestamp, data, size)) != ERROR_SUCCESS) {
return srs_error_new(ret, "ts: format on audio");
if ((err = format->on_audio(timestamp, data, size)) != srs_success) {
return srs_error_wrap(err, "ts: format on audio");
}
// ts support audio codec: aac/mp3
@ -2994,11 +2993,10 @@ srs_error_t SrsTsTransmuxer::write_audio(int64_t timestamp, char* data, int size
srs_error_t SrsTsTransmuxer::write_video(int64_t timestamp, char* data, int size)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
if ((ret = format->on_video(timestamp, data, size)) != ERROR_SUCCESS) {
return srs_error_new(ret, "ts: on video");
if ((err = format->on_video(timestamp, data, size)) != srs_success) {
return srs_error_wrap(err, "ts: on video");
}
// ignore info frame,

@ -38,16 +38,13 @@ SrsRtmpFormat::~SrsRtmpFormat()
{
}
int SrsRtmpFormat::on_metadata(SrsOnMetaDataPacket* meta)
srs_error_t SrsRtmpFormat::on_metadata(SrsOnMetaDataPacket* meta)
{
int ret = ERROR_SUCCESS;
// TODO: FIXME: Try to initialize format from metadata.
return ret;
return srs_success;
}
int SrsRtmpFormat::on_audio(SrsSharedPtrMessage* shared_audio)
srs_error_t SrsRtmpFormat::on_audio(SrsSharedPtrMessage* shared_audio)
{
SrsSharedPtrMessage* msg = shared_audio;
char* data = msg->payload;
@ -56,12 +53,12 @@ int SrsRtmpFormat::on_audio(SrsSharedPtrMessage* shared_audio)
return SrsFormat::on_audio(msg->timestamp, data, size);
}
int SrsRtmpFormat::on_audio(int64_t timestamp, char* data, int size)
srs_error_t SrsRtmpFormat::on_audio(int64_t timestamp, char* data, int size)
{
return SrsFormat::on_audio(timestamp, data, size);
}
int SrsRtmpFormat::on_video(SrsSharedPtrMessage* shared_video)
srs_error_t SrsRtmpFormat::on_video(SrsSharedPtrMessage* shared_video)
{
SrsSharedPtrMessage* msg = shared_video;
char* data = msg->payload;
@ -70,7 +67,7 @@ int SrsRtmpFormat::on_video(SrsSharedPtrMessage* shared_video)
return SrsFormat::on_video(msg->timestamp, data, size);
}
int SrsRtmpFormat::on_video(int64_t timestamp, char* data, int size)
srs_error_t SrsRtmpFormat::on_video(int64_t timestamp, char* data, int size)
{
return SrsFormat::on_video(timestamp, data, size);
}

@ -41,13 +41,13 @@ public:
virtual ~SrsRtmpFormat();
public:
// Initialize the format from metadata, optional.
virtual int on_metadata(SrsOnMetaDataPacket* meta);
virtual srs_error_t on_metadata(SrsOnMetaDataPacket* meta);
// When got a parsed audio packet.
virtual int on_audio(SrsSharedPtrMessage* shared_audio);
virtual int on_audio(int64_t timestamp, char* data, int size);
virtual srs_error_t on_audio(SrsSharedPtrMessage* shared_audio);
virtual srs_error_t on_audio(int64_t timestamp, char* data, int size);
// When got a parsed video packet.
virtual int on_video(SrsSharedPtrMessage* shared_video);
virtual int on_video(int64_t timestamp, char* data, int size);
virtual srs_error_t on_video(SrsSharedPtrMessage* shared_video);
virtual srs_error_t on_video(int64_t timestamp, char* data, int size);
};
#endif
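
With the RTMP-flavored overloads above converted as well, a publish path can feed shared messages and wrap failures uniformly. A hedged sketch; feed_rtmp_format() is an illustrative dispatcher that assumes the message type was determined by the caller.

srs_error_t feed_rtmp_format(SrsRtmpFormat* format, SrsSharedPtrMessage* msg, bool is_audio)
{
    srs_error_t err = srs_success;
    
    err = is_audio ? format->on_audio(msg) : format->on_video(msg);
    if (err != srs_success) {
        return srs_error_wrap(err, "rtmp format");
    }
    
    return err;
}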

@ -219,6 +219,7 @@ int SrsRtpPacket::decode(SrsBuffer* stream)
int SrsRtpPacket::decode_97(SrsBuffer* stream)
{
int ret = ERROR_SUCCESS;
srs_error_t err = srs_success;
// at least 2 bytes of content.
if (!stream->require(2)) {
@ -265,7 +266,10 @@ int SrsRtpPacket::decode_97(SrsBuffer* stream)
return ret;
}
if ((ret = audio->add_sample(sample, sample_size)) != ERROR_SUCCESS) {
if ((err = audio->add_sample(sample, sample_size)) != srs_success) {
// TODO: FIXME: Use error
ret = srs_error_code(err);
srs_freep(err);
srs_error("rtsp: rtp type97 add sample failed. ret=%d", ret);
return ret;
}
