fix #121, srs_info detail log compile failed. 0.9.168.

pull/133/head
winlin 11 years ago
parent 2e58fac5eb
commit 91d2296836

@ -209,6 +209,7 @@ Supported operating systems and hardware:
* 2013-10-17, Created.<br/>
## History
* v1.0, 2014-07-19, fix [#121](https://github.com/winlinvip/simple-rtmp-server/issues/121), srs_info detail log compile failed. 0.9.168.
* v1.0, 2014-07-19, fix [#119](https://github.com/winlinvip/simple-rtmp-server/issues/119), use iformat and oformat for ffmpeg transcode. 0.9.163.
* <strong>v1.0, 2014-07-13, [1.0 mainline6(0.9.160)](https://github.com/winlinvip/simple-rtmp-server/releases/tag/1.0.mainline8) released. 50029 lines.</strong>
* v1.0, 2014-07-13, refine the bandwidth check/test, add as/js library, use srs-librtmp for linux tool. 0.9.159.

@ -141,9 +141,8 @@ int SrsHttpClient::connect(SrsHttpUri* uri)
server.c_str(), port, timeout, ret);
return ret;
}
srs_info("connect to server success. "
"http url=%s, server=%s, ip=%s, port=%d",
uri->get_url(), uri->get_host(), ip.c_str(), uri->get_port());
srs_info("connect to server success. http url=%s, server=%s, port=%d",
uri->get_url(), uri->get_host(), uri->get_port());
connected = true;

@ -31,7 +31,7 @@ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// current release version
#define VERSION_MAJOR "0"
#define VERSION_MINOR "9"
#define VERSION_REVISION "167"
#define VERSION_REVISION "168"
#define RTMP_SIG_SRS_VERSION VERSION_MAJOR"."VERSION_MINOR"."VERSION_REVISION
// server info.
#define RTMP_SIG_SRS_KEY "SRS"

@ -133,11 +133,11 @@ extern ISrsThreadContext* _srs_context;
#define srs_error(msg, ...) _srs_log->error(__PRETTY_FUNCTION__, _srs_context->get_id(), msg, ##__VA_ARGS__)
#endif
#if 1
#if 0
#undef srs_verbose
#define srs_verbose(msg, ...) (void)0
#endif
#if 1
#if 0
#undef srs_info
#define srs_info(msg, ...) (void)0
#endif

@ -1071,7 +1071,7 @@ int SrsAmf0StrictArray::write(SrsStream* stream)
return ret;
}
srs_verbose("write amf0 property success. name=%s", name.c_str());
srs_verbose("write amf0 property success.");
}
srs_verbose("write strict_array object success.");

@ -289,6 +289,128 @@ messages.
*****************************************************************************
****************************************************************************/
// Default-construct a header: all numeric fields zeroed, preferred
// chunk stream id set to the connection cid.
SrsMessageHeader::SrsMessageHeader()
{
message_type = 0;
payload_length = 0;
timestamp_delta = 0;
stream_id = 0;
timestamp = 0;
// we always use the connection chunk-id
perfer_cid = RTMP_CID_OverConnection;
}
SrsMessageHeader::~SrsMessageHeader()
{
}
// Message-type predicates: each compares message_type against the
// corresponding RTMP_MSG_* type id.
bool SrsMessageHeader::is_audio()
{
return message_type == RTMP_MSG_AudioMessage;
}
bool SrsMessageHeader::is_video()
{
return message_type == RTMP_MSG_VideoMessage;
}
bool SrsMessageHeader::is_amf0_command()
{
return message_type == RTMP_MSG_AMF0CommandMessage;
}
bool SrsMessageHeader::is_amf0_data()
{
return message_type == RTMP_MSG_AMF0DataMessage;
}
bool SrsMessageHeader::is_amf3_command()
{
return message_type == RTMP_MSG_AMF3CommandMessage;
}
bool SrsMessageHeader::is_amf3_data()
{
return message_type == RTMP_MSG_AMF3DataMessage;
}
bool SrsMessageHeader::is_window_ackledgement_size()
{
return message_type == RTMP_MSG_WindowAcknowledgementSize;
}
bool SrsMessageHeader::is_ackledgement()
{
return message_type == RTMP_MSG_Acknowledgement;
}
bool SrsMessageHeader::is_set_chunk_size()
{
return message_type == RTMP_MSG_SetChunkSize;
}
bool SrsMessageHeader::is_user_control_message()
{
return message_type == RTMP_MSG_UserControlMessage;
}
bool SrsMessageHeader::is_set_peer_bandwidth()
{
return message_type == RTMP_MSG_SetPeerBandwidth;
}
bool SrsMessageHeader::is_aggregate()
{
return message_type == RTMP_MSG_AggregateMessage;
}
// Initialize as an AMF0 script-data header: zero timestamps, given
// payload size and stream id.
void SrsMessageHeader::initialize_amf0_script(int size, int stream)
{
message_type = RTMP_MSG_AMF0DataMessage;
payload_length = (int32_t)size;
timestamp_delta = (int32_t)0;
timestamp = (int64_t)0;
stream_id = (int32_t)stream;
// amf0 script use connection2 chunk-id
perfer_cid = RTMP_CID_OverConnection2;
}
// Initialize as an audio header; delta and absolute timestamp are both
// set to time (header built from scratch, not from a chunk stream).
void SrsMessageHeader::initialize_audio(int size, u_int32_t time, int stream)
{
message_type = RTMP_MSG_AudioMessage;
payload_length = (int32_t)size;
timestamp_delta = (int32_t)time;
timestamp = (int64_t)time;
stream_id = (int32_t)stream;
// audio chunk-id
perfer_cid = RTMP_CID_Audio;
}
// Initialize as a video header; same timestamp convention as audio.
void SrsMessageHeader::initialize_video(int size, u_int32_t time, int stream)
{
message_type = RTMP_MSG_VideoMessage;
payload_length = (int32_t)size;
timestamp_delta = (int32_t)time;
timestamp = (int64_t)time;
stream_id = (int32_t)stream;
// video chunk-id
perfer_cid = RTMP_CID_Video;
}
// Base message: no payload until a concrete subclass attaches one.
SrsMessage::SrsMessage()
{
payload = NULL;
size = 0;
}
SrsMessage::~SrsMessage()
{
}
SrsProtocol::AckWindowSize::AckWindowSize()
{
ack_window_size = acked_size = 0;
@ -1494,118 +1616,6 @@ int SrsProtocol::response_ping_message(int32_t timestamp)
return ret;
}
// NOTE(review): this is the removal side of the diff — these definitions
// were moved earlier in the file by this commit; bodies are identical.
// Default-construct a header: all numeric fields zeroed, preferred
// chunk stream id set to the connection cid.
SrsMessageHeader::SrsMessageHeader()
{
message_type = 0;
payload_length = 0;
timestamp_delta = 0;
stream_id = 0;
timestamp = 0;
// we always use the connection chunk-id
perfer_cid = RTMP_CID_OverConnection;
}
SrsMessageHeader::~SrsMessageHeader()
{
}
// Message-type predicates: compare message_type to the RTMP_MSG_* ids.
bool SrsMessageHeader::is_audio()
{
return message_type == RTMP_MSG_AudioMessage;
}
bool SrsMessageHeader::is_video()
{
return message_type == RTMP_MSG_VideoMessage;
}
bool SrsMessageHeader::is_amf0_command()
{
return message_type == RTMP_MSG_AMF0CommandMessage;
}
bool SrsMessageHeader::is_amf0_data()
{
return message_type == RTMP_MSG_AMF0DataMessage;
}
bool SrsMessageHeader::is_amf3_command()
{
return message_type == RTMP_MSG_AMF3CommandMessage;
}
bool SrsMessageHeader::is_amf3_data()
{
return message_type == RTMP_MSG_AMF3DataMessage;
}
bool SrsMessageHeader::is_window_ackledgement_size()
{
return message_type == RTMP_MSG_WindowAcknowledgementSize;
}
bool SrsMessageHeader::is_ackledgement()
{
return message_type == RTMP_MSG_Acknowledgement;
}
bool SrsMessageHeader::is_set_chunk_size()
{
return message_type == RTMP_MSG_SetChunkSize;
}
bool SrsMessageHeader::is_user_control_message()
{
return message_type == RTMP_MSG_UserControlMessage;
}
bool SrsMessageHeader::is_set_peer_bandwidth()
{
return message_type == RTMP_MSG_SetPeerBandwidth;
}
bool SrsMessageHeader::is_aggregate()
{
return message_type == RTMP_MSG_AggregateMessage;
}
// Initialize as an AMF0 script-data header: zero timestamps.
void SrsMessageHeader::initialize_amf0_script(int size, int stream)
{
message_type = RTMP_MSG_AMF0DataMessage;
payload_length = (int32_t)size;
timestamp_delta = (int32_t)0;
timestamp = (int64_t)0;
stream_id = (int32_t)stream;
// amf0 script use connection2 chunk-id
perfer_cid = RTMP_CID_OverConnection2;
}
// Initialize as an audio header; delta and absolute timestamp both set.
void SrsMessageHeader::initialize_audio(int size, u_int32_t time, int stream)
{
message_type = RTMP_MSG_AudioMessage;
payload_length = (int32_t)size;
timestamp_delta = (int32_t)time;
timestamp = (int64_t)time;
stream_id = (int32_t)stream;
// audio chunk-id
perfer_cid = RTMP_CID_Audio;
}
// Initialize as a video header; same timestamp convention as audio.
void SrsMessageHeader::initialize_video(int size, u_int32_t time, int stream)
{
message_type = RTMP_MSG_VideoMessage;
payload_length = (int32_t)size;
timestamp_delta = (int32_t)time;
timestamp = (int64_t)time;
stream_id = (int32_t)stream;
// video chunk-id
perfer_cid = RTMP_CID_Video;
}
SrsChunkStream::SrsChunkStream(int _cid)
{
fmt = 0;
@ -1620,16 +1630,6 @@ SrsChunkStream::~SrsChunkStream()
srs_freep(msg);
}
// NOTE(review): removal side of the diff — SrsMessage ctor/dtor were
// moved earlier in the file by this commit; bodies are identical.
// Base message: no payload until a concrete subclass attaches one.
SrsMessage::SrsMessage()
{
payload = NULL;
size = 0;
}
SrsMessage::~SrsMessage()
{
}
SrsCommonMessage::SrsCommonMessage()
{
}

@ -47,6 +47,120 @@ class SrsMessageHeader;
class SrsMessage;
class SrsChunkStream;
/**
* 4.1. Message Header
*/
class SrsMessageHeader
{
public:
/**
* 3bytes.
* Three-byte field that contains a timestamp delta of the message.
* The 3 bytes are packed in the big-endian order.
* @remark, only used for decoding message from chunk stream.
*/
int32_t timestamp_delta;
/**
* 3bytes.
* Three-byte field that represents the size of the payload in bytes.
* It is set in big-endian format.
*/
int32_t payload_length;
/**
* 1byte.
* One byte field to represent the message type. A range of type IDs
* (1-7) are reserved for protocol control messages.
*/
int8_t message_type;
/**
* 4bytes.
* Four-byte field that identifies the stream of the message. These
* bytes are set in big-endian format.
*/
int32_t stream_id;
/**
* Four-byte field that contains a timestamp of the message.
* The 4 bytes are packed in the big-endian order.
* @remark, used as calc timestamp when decode and encode time.
* @remark, we use 64bits for large time for jitter detect and hls.
*/
int64_t timestamp;
public:
/**
* the preferred cid (chunk stream id) to send this message over.
* set at decoding, and can be used to directly send the message,
* for example, to dispatch to all connections.
*/
int perfer_cid;
public:
SrsMessageHeader();
virtual ~SrsMessageHeader();
public:
// message-type predicates, one per RTMP_MSG_* type id.
bool is_audio();
bool is_video();
bool is_amf0_command();
bool is_amf0_data();
bool is_amf3_command();
bool is_amf3_data();
bool is_window_ackledgement_size();
bool is_ackledgement();
bool is_set_chunk_size();
bool is_user_control_message();
bool is_set_peer_bandwidth();
bool is_aggregate();
public:
/**
* create a amf0 script header, set the size and stream_id.
*/
void initialize_amf0_script(int size, int stream);
/**
* create a audio header, set the size, timestamp and stream_id.
*/
void initialize_audio(int size, u_int32_t time, int stream);
/**
* create a video header, set the size, timestamp and stream_id.
*/
void initialize_video(int size, u_int32_t time, int stream);
};
/**
* message is raw data RTMP message, bytes oriented,
* protocol always recv RTMP message, and can send RTMP message or RTMP packet.
* the shared-ptr message is a special RTMP message, use ref-count for performance issue.
*
* @remark, never directly new SrsMessage, the constructor is protected,
* for in the SrsMessage, we never know whether we should free the message,
* for SrsCommonMessage, we should free the payload,
* while for SrsSharedPtrMessage, we should use ref-count to free it.
* so, use these two concrete messages, SrsCommonMessage or SrsSharedPtrMessage, instead.
*/
class SrsMessage
{
// 4.1. Message Header
public:
SrsMessageHeader header;
// 4.2. Message Payload
public:
/**
* current message parsed size,
* size <= header.payload_length
* for the payload maybe sent in multiple chunks.
*/
int size;
/**
* the payload of message; SrsMessage never knows about the detail of payload,
* user must use SrsProtocol.decode_message to get concrete packet.
* @remark, not all message payloads can be decoded to packet. for example,
* video/audio payloads are raw bytes with no packet class.
*/
char* payload;
protected:
// protected: only concrete subclasses may construct (see class remark).
SrsMessage();
public:
virtual ~SrsMessage();
};
/**
* the protocol provides the rtmp-message-protocol services,
* to recv RTMP message from RTMP chunk stream,
@ -277,83 +391,6 @@ private:
virtual int response_ping_message(int32_t timestamp);
};
/**
* 4.1. Message Header
* NOTE(review): removal side of the diff — this declaration was moved
* earlier in the header by this commit; body is identical.
*/
class SrsMessageHeader
{
public:
/**
* 3bytes.
* Three-byte field that contains a timestamp delta of the message.
* The 3 bytes are packed in the big-endian order.
* @remark, only used for decoding message from chunk stream.
*/
int32_t timestamp_delta;
/**
* 3bytes.
* Three-byte field that represents the size of the payload in bytes.
* It is set in big-endian format.
*/
int32_t payload_length;
/**
* 1byte.
* One byte field to represent the message type. A range of type IDs
* (1-7) are reserved for protocol control messages.
*/
int8_t message_type;
/**
* 4bytes.
* Four-byte field that identifies the stream of the message. These
* bytes are set in big-endian format.
*/
int32_t stream_id;
/**
* Four-byte field that contains a timestamp of the message.
* The 4 bytes are packed in the big-endian order.
* @remark, used as calc timestamp when decode and encode time.
* @remark, we use 64bits for large time for jitter detect and hls.
*/
int64_t timestamp;
public:
/**
* the preferred cid (chunk stream id) to send this message over.
* set at decoding, and can be used to directly send the message,
* for example, to dispatch to all connections.
*/
int perfer_cid;
public:
SrsMessageHeader();
virtual ~SrsMessageHeader();
public:
// message-type predicates, one per RTMP_MSG_* type id.
bool is_audio();
bool is_video();
bool is_amf0_command();
bool is_amf0_data();
bool is_amf3_command();
bool is_amf3_data();
bool is_window_ackledgement_size();
bool is_ackledgement();
bool is_set_chunk_size();
bool is_user_control_message();
bool is_set_peer_bandwidth();
bool is_aggregate();
public:
/**
* create a amf0 script header, set the size and stream_id.
*/
void initialize_amf0_script(int size, int stream);
/**
* create a audio header, set the size, timestamp and stream_id.
*/
void initialize_audio(int size, u_int32_t time, int stream);
/**
* create a video header, set the size, timestamp and stream_id.
*/
void initialize_video(int size, u_int32_t time, int stream);
};
/**
* incoming chunk stream maybe interlaced,
* use the chunk stream to cache the input RTMP chunk streams.
@ -392,43 +429,6 @@ public:
virtual ~SrsChunkStream();
};
/**
* message is raw data RTMP message, bytes oriented,
* protocol always recv RTMP message, and can send RTMP message or RTMP packet.
* the shared-ptr message is a special RTMP message, use ref-count for performance issue.
*
* @remark, never directly new SrsMessage, the constructor is protected,
* for in the SrsMessage, we never know whether we should free the message,
* for SrsCommonMessage, we should free the payload,
* while for SrsSharedPtrMessage, we should use ref-count to free it.
* so, use these two concrete messages, SrsCommonMessage or SrsSharedPtrMessage, instead.
* NOTE(review): removal side of the diff — this declaration was moved
* earlier in the header by this commit; body is identical.
*/
class SrsMessage
{
// 4.1. Message Header
public:
SrsMessageHeader header;
// 4.2. Message Payload
public:
/**
* current message parsed size,
* size <= header.payload_length
* for the payload maybe sent in multiple chunks.
*/
int size;
/**
* the payload of message; SrsMessage never knows about the detail of payload,
* user must use SrsProtocol.decode_message to get concrete packet.
* @remark, not all message payloads can be decoded to packet. for example,
* video/audio payloads are raw bytes with no packet class.
*/
char* payload;
protected:
// protected: only concrete subclasses may construct (see class remark).
SrsMessage();
public:
virtual ~SrsMessage();
};
/**
* the common message used free the payload in common way.
*/

Loading…
Cancel
Save