standards - rtsp with new audio and video refactors and sync standard working

This commit is contained in:
Gerard CL
2014-05-20 16:51:35 +02:00
parent 4361eef2d1
commit e322ffdbde
23 changed files with 906 additions and 636 deletions

View File

@@ -1220,7 +1220,7 @@ case $host in
CXXFLAGS="$CXXFLAGS ${GLIB_CFLAGS} ${CURL_CFLAGS}"
RTSP_INC=
RTSP_LIB="${GLIB_LIBS} ${CURL_LIBS}"
RTSP_OBJ="src/utils/h264_stream.o src/video_capture/rtsp.o src/rtp/rtpdec_h264.o"
RTSP_OBJ="src/utils/h264_stream.o src/video_capture/rtsp.o src/rtp/rtpdec_h264.o src/rtp/rtpenc_h264.o"
AC_SUBST(RTSP_LIB_TARGET, "lib/ultragrid/vidcap_rtsp.so.$video_capture_abi_version")
LIB_TARGETS="$LIB_TARGETS $RTSP_LIB_TARGET"
LIB_OBJS="$LIB_OBJS $RTSP_OBJ"
@@ -2630,7 +2630,7 @@ if test $system = Linux -o $system = MacOSX; then
fi
fi
AC_SUBST(NCURSES_LIBS)
AC_SUBST(IMPORT_CONTROL_KEYBOARD_LIBS)
# -------------------------------------------------------------------------------------------------
# OpenSSL-libcrypto

View File

@@ -735,6 +735,13 @@ static void audio_sender_process_message(struct state_audio *s, struct msg_sende
fprintf(stderr, "Changing audio receiver to: %s failed!\n",
msg->receiver);
}
if (rtcp_change_dest(s->audio_network_device,
msg->receiver) == FALSE){
fprintf(stderr, "Changing rtcp audio receiver to: %s failed!\n",
msg->receiver);
}
break;
case SENDER_MSG_CHANGE_PORT:
rtp_done(s->audio_network_device);
@@ -814,7 +821,7 @@ static void *audio_sender_thread(void *arg)
audio_frame2 *compressed = NULL;
while((compressed = audio_codec_compress(s->audio_coder, uncompressed))) {
//TODO to be dynamic as a function of the selected codec, now only accepting mulaw without checking errors
audio_tx_send_mulaw(s->tx_session, s->audio_network_device, compressed);
audio_tx_send_standard(s->tx_session, s->audio_network_device, compressed);
uncompressed = NULL;
}
}

View File

@@ -210,7 +210,7 @@ static void usage(void)
printf("\n");
printf("\t-c <cfg> \tcompress video (see '-c help')\n");
printf("\n");
printf("\t--rtsp-server \t\tRTSP server: dynamically serving H264 RTP standard transport\n");
printf("\t--rtsp-server \tRTSP server: dynamically serving H264 RTP standard transport (use '--rtps-server=help' to see usage)\n");
printf("\n");
printf("\t-i|--sage[=<opts>] \tiHDTV compatibility mode / SAGE TX\n");
printf("\n");
@@ -434,6 +434,8 @@ int main(int argc, char *argv[])
const char *requested_audio_fec = DEFAULT_AUDIO_FEC;
char *audio_channel_map = NULL;
const char *audio_scale = "mixauto";
rtsp_serv_t* rtsp_server = NULL;
int rtsp_port = 0;
bool isStd = FALSE;
int recv_port_number = PORT_BASE;
int send_port_number = PORT_BASE;
@@ -464,6 +466,7 @@ int main(int argc, char *argv[])
bool receiver_thread_started = false,
capture_thread_started = false;
unsigned display_flags = 0;
int compressed_audio_sample_rate = 48000;
int ret;
struct vidcap_params *audio_cap_dev;
long packet_rate;
@@ -500,7 +503,7 @@ int main(int argc, char *argv[])
{"compress", required_argument, 0, 'c'},
{"ihdtv", no_argument, 0, 'i'},
{"sage", optional_argument, 0, 'S'},
{"rtsp-server", no_argument, 0, 'H'},
{"rtsp-server", optional_argument, 0, 'H'},
{"receive", required_argument, 0, 'r'},
{"send", required_argument, 0, 's'},
{"help", no_argument, 0, 'h'},
@@ -608,7 +611,16 @@ int main(int argc, char *argv[])
break;
case 'H':
video_protocol = H264_STD;
//h264_opts = optarg;
if (optarg == NULL) {
rtsp_port = 0;
} else {
if (!strcmp(optarg, "help")) {
rtps_server_usage();
return 0;
}
rtsp_port = get_rtsp_server_port(optarg);
if (rtsp_port == -1) return 0;
}
break;
case 'r':
audio_recv = optarg;
@@ -956,20 +968,17 @@ int main(int argc, char *argv[])
display_device, requested_mtu,
argc, argv);
}else if (video_protocol == H264_STD) {
rtps_types_t avType;
if(strcmp("none", vidcap_params_get_driver(vidcap_params_head)) != 0 && (strcmp("none",audio_send) != 0)) avType = avStdDyn; //AVStream
else if((strcmp("none",audio_send) != 0)) avType = audioPCMUdyn; //AStream
else if(strcmp("none", vidcap_params_get_driver(vidcap_params_head))) avType = videoH264; //VStream
else {
printf("[RTSP SERVER CHECK] no stream type... check capture devices input...\n");
return EXIT_FAIL_USAGE;
}
rtps_types_t avType;
if(strcmp("none", vidcap_params_get_driver(vidcap_params_head)) != 0 && (strcmp("none",audio_send) != 0)) avType = av; //AVStream
else if((strcmp("none",audio_send) != 0)) avType = audio; //AStream
else if(strcmp("none", vidcap_params_get_driver(vidcap_params_head))) avType = video; //VStream
else printf("[RTSP SERVER CHECK] no stream type... check capture devices input...\n");
uv->state_video_rxtx = new h264_rtp_video_rxtx(&root_mod, video_exporter,
requested_compression, requested_encryption,
requested_receiver, recv_port_number, send_port_number,
ipv6, requested_mcast_if, requested_video_fec, requested_mtu,
packet_rate, avType);
packet_rate, avType, get_audio_codec(audio_codec), compressed_audio_sample_rate, audio_capture_channels, 2 /*bps*/, rtsp_port);
} else if (video_protocol == ULTRAGRID_RTP) {
uv->state_video_rxtx = new ultragrid_rtp_video_rxtx(&root_mod, video_exporter,
requested_compression, requested_encryption,
@@ -1065,6 +1074,10 @@ cleanup:
vidcap_params_head = next;
}
#ifdef HAVE_RTSP_SERVER
if(rtsp_server) c_stop_server(rtsp_server);
#endif
module_done(&root_mod);
free(uv);

View File

@@ -3879,6 +3879,19 @@ int rtp_change_dest(struct rtp *session, const char *addr)
return udp_change_dest(session->rtp_socket, addr);
}
/**
 * rtcp_change_dest:
 * Changes the destination address of the RTCP (control) channel only;
 * the RTP data channel is redirected separately via rtp_change_dest().
 * Not thread-safe: there must be only one sending thread.
 * @session: The RTP session whose RTCP socket is to be redirected.
 * @addr: New receiver address string (accepted formats are whatever
 *        udp_change_dest() supports -- TODO confirm: hostname vs. IP).
 * Returns TRUE on success, FALSE otherwise (propagated from udp_change_dest()).
 */
int rtcp_change_dest(struct rtp *session, const char *addr)
{
return udp_change_dest(session->rtcp_socket, addr);
}
uint64_t rtp_get_bytes_sent(struct rtp *session)
{
return session->rtp_bytes_sent;

View File

@@ -289,6 +289,7 @@ int rtp_set_send_buf(struct rtp *session, int bufsize);
void rtp_flush_recv_buf(struct rtp *session);
int rtp_change_dest(struct rtp *session, const char *addr);
int rtcp_change_dest(struct rtp *session, const char *addr);
uint64_t rtp_get_bytes_sent(struct rtp *session);
int rtp_compute_fract_lost(struct rtp *session, uint32_t ssrc);

View File

@@ -50,7 +50,8 @@ extern "C" {
* Packet formats are described in papers referenced here:<br/>
* https://www.sitola.cz/igrid/index.php/Developer_Documentation#Packet_formats
*/
#define PT_ITU_T_G711_PCMU 00 /* mU-law mono */
#define PT_ITU_T_G711_PCMU 0 /* mU-law std */
#define PT_ITU_T_G711_PCMA 8 /* A-law std */
#define PT_VIDEO 20
#define PT_AUDIO 21
#define PT_VIDEO_LDGM 22

195
src/rtp/rtpenc_h264.c Normal file
View File

@@ -0,0 +1,195 @@
/*
* AUTHOR: Gerard Castillo <gerard.castillo@i2cat.net>,
* David Cassany <david.cassany@i2cat.net>
*
* Copyright (c) 2005-2010 Fundació i2CAT, Internet I Innovació Digital a Catalunya
*
* Redistribution and use in source and binary forms, with or without
* modification, is permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. All advertising materials mentioning features or use of this software
* must display the following acknowledgement:
*
* This product includes software developed by the University of Southern
* California Information Sciences Institute.
*
* 4. Neither the name of the University nor of the Institute may be used
* to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#include "config_unix.h"
#endif // HAVE_CONFIG_H
#include "debug.h"
#include "perf.h"
#include "transmit.h"
#include "module.h"
#include "tv.h"
#include "rtp/rtp.h"
#include "rtp/rtp_callback.h"
#include "rtp/pbuf.h"
#include "rtp/rtpenc_h264.h"
#include "video.h"
#include "video_codec.h"
#include "compat/platform_spin.h"
#include "video_frame.h"
//UTILS DECLARATIONS
u_int32_t test4Bytes(struct rtpenc_h264_state *rtpench264state);
unsigned char* startOfFrame(struct rtpenc_h264_state *rtpench264state);
unsigned char* nextToParse(struct rtpenc_h264_state *rtpench264state);
void checkEndOfFrame(struct rtpenc_h264_state *rtpench264state,
unsigned numBytesNeeded);
u_int8_t get1Byte(struct rtpenc_h264_state *rtpench264state);
void setFromState(struct rtpenc_h264_state *rtpench264state);
void setToState(struct rtpenc_h264_state *rtpench264state);
void skipBytes(struct rtpenc_h264_state *rtpench264state, unsigned numBytes);
bool haveSeenEOF(struct rtpenc_h264_state *rtpench264state); //EndOfFrame
unsigned curNALSize(struct rtpenc_h264_state *rtpench264state);
/**
 * rtpenc_h264_init_state:
 * Allocates a fresh, zeroed H.264 NAL-unit parser state.
 *
 * @return newly allocated state (ownership passes to the caller, release
 *         with free()), or NULL when the allocation fails.
 */
struct rtpenc_h264_state * rtpenc_h264_init_state(void) {
	struct rtpenc_h264_state *rtpench264state;

	rtpench264state = calloc(1, sizeof *rtpench264state);
	if (rtpench264state == NULL) {
		return NULL; /* fixed: the original dereferenced the result unchecked */
	}
	/* calloc() zero-fills the block, so curParserIndex, curParserIndexOffset
	 * and inputFrameSize are already 0 and haveSeenFirstStartCode /
	 * haveSeenEOF are already false -- the original re-assigned them
	 * redundantly. */
	return rtpench264state;
}
/**
 * rtpenc_h264_frame_parse:
 * Scans an Annex-B encoded H.264 frame buffer and delimits the next NAL unit.
 *
 * On the first call for a frame (haveSeenFirstStartCode == false) the state
 * is re-armed on buf_in/size and bytes are skipped until the 4-byte start
 * code 0x00000001 is found; on subsequent calls the 4- or 3-byte start code
 * in front of the current position is skipped instead.  The NAL unit is the
 * span ['from', 'to') up to the next start code or the end of the frame, and
 * its first byte (which carries nal_unit_type) is recorded in
 * firstByteOfNALUnit.
 *
 * @param rtpench264state parser state ('from'/'to' delimit the unit on return)
 * @param buf_in          frame data; only latched on the first call per frame
 * @param size            frame length in bytes (stored into an unsigned
 *                        field, so assumed non-negative -- TODO confirm callers)
 * @return size in bytes of the delimited NAL unit, or 0 when no start code
 *         was found (an error is logged via error_msg())
 */
unsigned rtpenc_h264_frame_parse(struct rtpenc_h264_state *rtpench264state, uint8_t *buf_in, int size) {
	u_int32_t next4Bytes = 0; /* fixed: was initialized with NULL (a pointer constant) */

	if (!rtpench264state->haveSeenFirstStartCode) {
		/* New frame: reset pointers and parse indices onto buf_in. */
		rtpench264state->startOfFrame = rtpench264state->from = rtpench264state->to = buf_in;
		rtpench264state->curParserIndex = 0;
		rtpench264state->inputFrameSize = size;
		rtpench264state->curParserIndexOffset = 0;

		/* The frame must start with 0x00000001: skip any input bytes
		 * that precede the first start code. */
		while (test4Bytes(rtpench264state) != 0x00000001) {
			get1Byte(rtpench264state);
			if (haveSeenEOF(rtpench264state)) {
				error_msg("No NAL found!\n");
				return 0; /* no start code anywhere in this frame */
			}
		}
		skipBytes(rtpench264state, 4); /* consume the initial start code */
		setFromState(rtpench264state); /* 'from' now points at the NAL unit */
		rtpench264state->haveSeenFirstStartCode = true;
	} else {
		/* Continue within the same frame: we are positioned on the
		 * start code (0x00000001 or 0x000001) of the next NAL unit. */
		if (test4Bytes(rtpench264state) == 0x00000001) {
			skipBytes(rtpench264state, 4);
		} else {
			skipBytes(rtpench264state, 3);
		}
		if (haveSeenEOF(rtpench264state)) {
			error_msg("No NAL found!\n");
			return 0; /* start code at the very end: nothing follows */
		}
		setFromState(rtpench264state);
	}

	/* Save everything up to the next 0x00000001 / 0x000001 or EOF; the
	 * first byte of the unit carries the nal_unit_type. */
	next4Bytes = test4Bytes(rtpench264state);
	rtpench264state->firstByteOfNALUnit = next4Bytes >> 24;
	while (next4Bytes != 0x00000001
			&& (next4Bytes & 0xFFFFFF00) != 0x00000100
			&& !haveSeenEOF(rtpench264state)) {
		if ((unsigned) (next4Bytes & 0xFF) > 1) {
			/* Common case: a start code cannot begin anywhere
			 * inside these four bytes, so consume all of them. */
			skipBytes(rtpench264state, 4);
		} else {
			/* A start code might begin here: advance one byte
			 * and re-test. */
			skipBytes(rtpench264state, 1);
		}
		next4Bytes = test4Bytes(rtpench264state);
	}
	setToState(rtpench264state); /* 'to' = end of NAL unit (or frame end) */
	return curNALSize(rtpench264state);
}
//UTILS
/* Peek (without consuming) at the next four bytes of the frame, returned as
 * a big-endian 32-bit word; may flag EOF via checkEndOfFrame(). */
u_int32_t test4Bytes(struct rtpenc_h264_state *rtpench264state) {
	checkEndOfFrame(rtpench264state, 4);
	unsigned char const *p = nextToParse(rtpench264state);
	u_int32_t word = 0;
	for (int i = 0; i < 4; i++) {
		word = (word << 8) | p[i];
	}
	return word;
}
/* Base address of the frame buffer currently being parsed. */
unsigned char* startOfFrame(struct rtpenc_h264_state *rtpench264state) {
	unsigned char *base = rtpench264state->startOfFrame;
	return base;
}
/* Address of the first not-yet-consumed byte of the frame. */
unsigned char* nextToParse(struct rtpenc_h264_state *rtpench264state) {
	return startOfFrame(rtpench264state) + rtpench264state->curParserIndex;
}
/*
 * Latches haveSeenEOF once the parser cannot supply numBytesNeeded more
 * bytes from the current position.  The flag is sticky: it is only ever
 * set here, never cleared (a new frame resets state in
 * rtpenc_h264_frame_parse()).
 *
 * NOTE(review): ">=" also flags EOF when exactly numBytesNeeded bytes
 * remain (index + needed == inputFrameSize).  That looks one byte
 * conservative, but setToState() extends the last NAL unit to the frame
 * end when EOF is set, so this may be a deliberate sentinel -- confirm
 * before "fixing" to ">".
 */
void checkEndOfFrame(struct rtpenc_h264_state *rtpench264state,
unsigned numBytesNeeded) {
// mark EOF when the requested lookahead would run off the frame
if (rtpench264state->curParserIndex + numBytesNeeded
>= rtpench264state->inputFrameSize){
rtpench264state->haveSeenEOF = true;
}
}
u_int8_t get1Byte(struct rtpenc_h264_state *rtpench264state) { // byte-aligned
checkEndOfFrame(rtpench264state, 1);
return startOfFrame(rtpench264state)[rtpench264state->curParserIndex++];
}
/* Latch the current parse position as the start ('from') of the next NAL
 * unit and remember the index it corresponds to. */
void setFromState(struct rtpenc_h264_state *rtpench264state) {
	unsigned idx = rtpench264state->curParserIndex;
	rtpench264state->from = rtpench264state->startOfFrame + idx;
	rtpench264state->curParserIndexOffset = idx;
}
/* Latch the end ('to') of the current NAL unit: at EOF it is the frame end,
 * otherwise 'from' plus the bytes consumed since setFromState(). */
void setToState(struct rtpenc_h264_state *rtpench264state) {
	if (!haveSeenEOF(rtpench264state)) {
		unsigned consumed = rtpench264state->curParserIndex
				- rtpench264state->curParserIndexOffset;
		rtpench264state->to = rtpench264state->from + consumed;
	} else {
		rtpench264state->to = rtpench264state->startOfFrame
				+ rtpench264state->inputFrameSize;
	}
}
/* Advance the parse position by numBytes; may flag EOF via
 * checkEndOfFrame(). */
void skipBytes(struct rtpenc_h264_state *rtpench264state, unsigned numBytes) {
	checkEndOfFrame(rtpench264state, numBytes);
	rtpench264state->curParserIndex = rtpench264state->curParserIndex + numBytes;
}
/* True once the parser ran out of bytes for a requested lookahead. */
bool haveSeenEOF(struct rtpenc_h264_state *rtpench264state) { //EndOfFrame
	bool eof = rtpench264state->haveSeenEOF;
	return eof;
}
/* Size in bytes of the NAL unit delimited by ['from', 'to'). */
unsigned curNALSize(struct rtpenc_h264_state *rtpench264state) {
	return (unsigned) (rtpench264state->to - rtpench264state->from);
}

75
src/rtp/rtpenc_h264.h Normal file
View File

@@ -0,0 +1,75 @@
/*
* AUTHOR: Gerard Castillo <gerard.castillo@i2cat.net>,
* David Cassany <david.cassany@i2cat.net>
*
*
* Copyright (c) 2005-2010 Fundació i2CAT, Internet I Innovació Digital a Catalunya
*
* Redistribution and use in source and binary forms, with or without
* modification, is permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. All advertising materials mentioning features or use of this software
* must display the following acknowledgement:
*
* This product includes software developed by the University of Southern
* California Information Sciences Institute.
*
* 4. Neither the name of the University nor of the Institute may be used
* to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#ifndef RTP_ENC_H264_H /* renamed: leading "_" + uppercase is reserved (C11 7.1.3) */
#define RTP_ENC_H264_H

/* Make the header self-contained: the struct below needs these types. */
#include <stdbool.h>    /* bool */
#include <stdint.h>     /* uint8_t */
#include <sys/types.h>  /* u_int8_t (BSD/POSIX alias used below) */

#ifdef __cplusplus
extern "C" {
#endif

/** Dynamic RTP payload type announced for the H.264 stream. */
#define RTPENC_H264_PT 96

/**
 * Incremental parser state used to split one Annex-B encoded H.264 frame
 * into NAL units (see rtpenc_h264_frame_parse()).
 */
struct rtpenc_h264_state {
	bool haveSeenFirstStartCode;   /* true once the frame's first 0x00000001 was consumed */
	u_int8_t firstByteOfNALUnit;   /* first byte of the current NAL unit (carries nal_unit_type) */
	unsigned char* startOfFrame;   /* base of the frame buffer being parsed */
	unsigned char* to;             /* one past the last byte of the current NAL unit */
	unsigned char* from;           /* first byte of the current NAL unit */
	unsigned maxPacketSize;        /* NOTE(review): not written by the parser itself -- confirm user */
	unsigned curNALOffset;         /* NOTE(review): not written by the parser itself -- confirm user */
	bool lastNALUnitFragment;      /* NOTE(review): not written by the parser itself -- confirm user */
	unsigned curParserIndex;       /* current read position, <= inputFrameSize */
	unsigned curParserIndexOffset; /* value of curParserIndex when 'from' was latched */
	unsigned inputFrameSize;       /* total length of the frame buffer in bytes */
	bool haveSeenEOF;              /* set once fewer bytes remain than a lookahead needs */
};

/* Allocates a zeroed parser state; caller owns and frees it. Returns NULL on OOM. */
struct rtpenc_h264_state * rtpenc_h264_init_state(void);
/* Delimits the next NAL unit; returns its size in bytes, 0 on error. */
unsigned rtpenc_h264_frame_parse(struct rtpenc_h264_state *rtpench264state, uint8_t *buf_in, int size);

#ifdef __cplusplus
}
#endif

#endif /* RTP_ENC_H264_H */

View File

@@ -48,24 +48,29 @@
BasicRTSPOnlyServer *BasicRTSPOnlyServer::srvInstance = NULL;
BasicRTSPOnlyServer::BasicRTSPOnlyServer(int port, struct module *mod, rtps_types_t avType){
BasicRTSPOnlyServer::BasicRTSPOnlyServer(int port, struct module *mod, rtps_types_t avType, audio_codec_t audio_codec, int audio_sample_rate, int audio_channels, int audio_bps, int rtp_port){
if(mod == NULL){
exit(1);
}
this->fPort = port;
this->mod = mod;
this->avType = avType;
this->audio_codec = audio_codec;
this->audio_sample_rate = audio_sample_rate;
this->audio_channels = audio_channels;
this->audio_bps = audio_bps;
this->rtp_port = rtp_port;
this->rtspServer = NULL;
this->env = NULL;
this->srvInstance = this;
}
BasicRTSPOnlyServer*
BasicRTSPOnlyServer::initInstance(int port, struct module *mod, rtps_types_t avType){
BasicRTSPOnlyServer::initInstance(int port, struct module *mod, rtps_types_t avType, audio_codec_t audio_codec, int audio_sample_rate, int audio_channels, int audio_bps, int rtp_port){
if (srvInstance != NULL){
return srvInstance;
}
return new BasicRTSPOnlyServer(port, mod, avType);
return new BasicRTSPOnlyServer(port, mod, avType, audio_codec, audio_sample_rate, audio_channels, audio_bps, rtp_port);
}
BasicRTSPOnlyServer*
@@ -111,19 +116,22 @@ int BasicRTSPOnlyServer::init_server() {
"UltraGrid RTSP server enabling standard transport",
"UltraGrid RTSP server");
if(avType == avStdDyn){
if(avType == av){
sms->addSubsession(BasicRTSPOnlySubsession
::createNew(*env, True, mod, audioPCMUdyn));
::createNew(*env, True, mod, audio, audio_codec, audio_sample_rate, audio_channels, audio_bps, rtp_port));
sms->addSubsession(BasicRTSPOnlySubsession
::createNew(*env, True, mod, videoH264));
}else if(avType == avStd){
::createNew(*env, True, mod, video, audio_codec, audio_sample_rate, audio_channels, audio_bps, rtp_port));
}else if(avType == audio){
sms->addSubsession(BasicRTSPOnlySubsession
::createNew(*env, True, mod, audioPCMUstd));
sms->addSubsession(BasicRTSPOnlySubsession
::createNew(*env, True, mod, videoH264));
}else sms->addSubsession(BasicRTSPOnlySubsession
::createNew(*env, True, mod, avType));
::createNew(*env, True, mod, audio, audio_codec, audio_sample_rate, audio_channels, audio_bps, rtp_port));
}else if(avType == video){
sms->addSubsession(BasicRTSPOnlySubsession
::createNew(*env, True, mod, video, audio_codec, audio_sample_rate, audio_channels, audio_bps, rtp_port));
}else{
*env << "\n[RTSP Server] Error when trying to play stream type: \"" << avType << "\"\n";
exit(1);
}
rtspServer->addServerMediaSession(sms);

View File

@@ -48,16 +48,17 @@
#include <RTSPServer.hh>
#include <BasicUsageEnvironment.hh>
#include "rtsp/rtsp_utils.h"
#include "audio/audio.h"
#include "module.h"
class BasicRTSPOnlyServer {
private:
BasicRTSPOnlyServer(int port, struct module *mod, rtps_types_t avType);
BasicRTSPOnlyServer(int port, struct module *mod, rtps_types_t avType, audio_codec_t audio_codec, int audio_sample_rate, int audio_channels, int audio_bps, int rtp_port);
public:
static BasicRTSPOnlyServer* initInstance(int port, struct module *mod, rtps_types_t avType);
static BasicRTSPOnlyServer* initInstance(int port, struct module *mod, rtps_types_t avType, audio_codec_t audio_codec, int audio_sample_rate, int audio_channels, int audio_bps, int rtp_port);
static BasicRTSPOnlyServer* getInstance();
int init_server();
@@ -72,6 +73,11 @@ private:
int fPort;
struct module *mod;
rtps_types_t avType;
audio_codec_t audio_codec;
int audio_sample_rate;
int audio_channels;
int audio_bps;
int rtp_port; //server rtp port
RTSPServer* rtspServer;
UsageEnvironment* env;
};

View File

@@ -51,269 +51,283 @@
BasicRTSPOnlySubsession*
BasicRTSPOnlySubsession::createNew(UsageEnvironment& env,
Boolean reuseFirstSource,
struct module *mod, rtps_types_t avType){
return new BasicRTSPOnlySubsession(env, reuseFirstSource, mod, avType);
Boolean reuseFirstSource, struct module *mod, rtps_types_t avType,
audio_codec_t audio_codec, int audio_sample_rate, int audio_channels,
int audio_bps, int rtp_port) {
return new BasicRTSPOnlySubsession(env, reuseFirstSource, mod, avType,
audio_codec, audio_sample_rate, audio_channels, audio_bps, rtp_port);
}
BasicRTSPOnlySubsession
::BasicRTSPOnlySubsession(UsageEnvironment& env,
Boolean reuseFirstSource,
struct module *mod, rtps_types_t avType)
: ServerMediaSubsession(env),
fSDPLines(NULL),
fReuseFirstSource(reuseFirstSource), fLastStreamToken(NULL) {
Vdestination = NULL;
Adestination = NULL;
gethostname(fCNAME, sizeof fCNAME);
this->fmod = mod;
this->avType = avType;
fCNAME[sizeof fCNAME-1] = '\0';
BasicRTSPOnlySubsession::BasicRTSPOnlySubsession(UsageEnvironment& env,
Boolean reuseFirstSource, struct module *mod, rtps_types_t avType,
audio_codec_t audio_codec, int audio_sample_rate, int audio_channels,
int audio_bps, int rtp_port) :
ServerMediaSubsession(env), fSDPLines(NULL), fReuseFirstSource(
reuseFirstSource), fLastStreamToken(NULL) {
Vdestination = NULL;
Adestination = NULL;
gethostname(fCNAME, sizeof fCNAME);
this->fmod = mod;
this->avType = avType;
this->audio_codec = audio_codec;
this->audio_sample_rate = audio_sample_rate;
this->audio_channels = audio_channels;
this->audio_bps = audio_bps;
this->rtp_port = rtp_port;
fCNAME[sizeof fCNAME - 1] = '\0';
}
BasicRTSPOnlySubsession::~BasicRTSPOnlySubsession() {
delete[] fSDPLines;
delete Adestination;
delete Vdestination;
delete[] fSDPLines;
delete Adestination;
delete Vdestination;
}
char const* BasicRTSPOnlySubsession::sdpLines() {
if (fSDPLines == NULL){
setSDPLines();
}
if(Adestination != NULL || Vdestination != NULL) return NULL;
return fSDPLines;
if (fSDPLines == NULL) {
setSDPLines();
}
if (Adestination != NULL || Vdestination != NULL)
return NULL;
return fSDPLines;
}
void BasicRTSPOnlySubsession
::setSDPLines() {
//TODO: should be more dynamic
//VStream
if(avType == videoH264 || avType == avStdDyn || avType == avStd){
unsigned estBitrate = 5000;
char const* mediaType = "video";
uint8_t rtpPayloadType = 96;
AddressString ipAddressStr(fServerAddressForSDP);
char* rtpmapLine = strdup("a=rtpmap:96 H264/90000\n");
char const* auxSDPLine = "";
void BasicRTSPOnlySubsession::setSDPLines() {
//TODO: should be more dynamic
//VStream
if (avType == video || avType == av) {
unsigned estBitrate = 5000;
char const* mediaType = "video";
uint8_t rtpPayloadType = 96;
AddressString ipAddressStr(fServerAddressForSDP);
char* rtpmapLine = strdup("a=rtpmap:96 H264/90000\n");
char const* auxSDPLine = "";
char const* const sdpFmt =
"m=%s %u RTP/AVP %u\r\n"
"c=IN IP4 %s\r\n"
"b=AS:%u\r\n"
"%s"
"a=control:%s\r\n";
unsigned sdpFmtSize = strlen(sdpFmt)
+ strlen(mediaType) + 5 /* max short len */ + 3 /* max char len */
+ strlen(ipAddressStr.val())
+ 20 /* max int len */
+ strlen(rtpmapLine)
+ strlen(trackId());
char* sdpLines = new char[sdpFmtSize];
char const* const sdpFmt = "m=%s %u RTP/AVP %u\r\n"
"c=IN IP4 %s\r\n"
"b=AS:%u\r\n"
"%s"
"a=control:%s\r\n";
unsigned sdpFmtSize = strlen(sdpFmt) + strlen(mediaType) + 5 /* max short len */
+ 3 /* max char len */
+ strlen(ipAddressStr.val()) + 20 /* max int len */
+ strlen(rtpmapLine) + strlen(trackId());
char* sdpLines = new char[sdpFmtSize];
sprintf(sdpLines, sdpFmt,
mediaType, // m= <media>
fPortNumForSDP, // m= <port>
rtpPayloadType, // m= <fmt list>
ipAddressStr.val(), // c= address
estBitrate, // b=AS:<bandwidth>
rtpmapLine, // a=rtpmap:... (if present)
trackId()); // a=control:<track-id>
sprintf(sdpLines, sdpFmt, mediaType, // m= <media>
fPortNumForSDP, // m= <port>
rtpPayloadType, // m= <fmt list>
ipAddressStr.val(), // c= address
estBitrate, // b=AS:<bandwidth>
rtpmapLine, // a=rtpmap:... (if present)
trackId()); // a=control:<track-id>
fSDPLines = sdpLines;
}
//AStream
if(avType == audioPCMUdyn || avType == avStdDyn){
unsigned estBitrate = 384;
char const* mediaType = "audio";
uint8_t rtpPayloadType = 97;
AddressString ipAddressStr(fServerAddressForSDP);
char* rtpmapLine = strdup("a=rtpmap:97 PCMU/48000/2\n");
char const* auxSDPLine = "";
fSDPLines = sdpLines;
}
//AStream
if (avType == audio || avType == av) {
unsigned estBitrate = 384;
char const* mediaType = "audio";
AddressString ipAddressStr(fServerAddressForSDP);
uint8_t rtpPayloadType;
char const* const sdpFmt =
"m=%s %u RTP/AVP %u\r\n"
"c=IN IP4 %s\r\n"
"b=AS:%u\r\n"
"%s"
"a=control:%s\r\n";
unsigned sdpFmtSize = strlen(sdpFmt)
+ strlen(mediaType) + 5 /* max short len */ + 3 /* max char len */
+ strlen(ipAddressStr.val())
+ 20 /* max int len */
+ strlen(rtpmapLine)
+ strlen(trackId());
char* sdpLines = new char[sdpFmtSize];
if (audio_sample_rate == 8000 && audio_channels == 1) { //NOW NOT COMPUTING 1 BPS BECAUSE RESAMPLER FORCES TO 2 BPS...
if (audio_codec == AC_MULAW)
rtpPayloadType = 0;
if (audio_codec == AC_ALAW)
rtpPayloadType = 8;
} else {
rtpPayloadType = 97;
}
sprintf(sdpLines, sdpFmt,
mediaType, // m= <media>
fPortNumForSDP, // m= <port>
rtpPayloadType, // m= <fmt list>
ipAddressStr.val(), // c= address
estBitrate, // b=AS:<bandwidth>
rtpmapLine, // a=rtpmap:... (if present)
trackId()); // a=control:<track-id>
char* rtpmapLine = strdup("a=rtpmap:97 PCMU/48000/2\n"); //only to alloc max possible size
char const* auxSDPLine = "";
fSDPLines = sdpLines;
}
char const* const sdpFmt = "m=%s %u RTP/AVP %u\r\n"
"c=IN IP4 %s\r\n"
"b=AS:%u\r\n"
"a=rtpmap:%u %s/%d/%d\r\n"
"a=control:%s\r\n";
unsigned sdpFmtSize = strlen(sdpFmt) + strlen(mediaType) + 5 /* max short len */
+ 3 /* max char len */
+ strlen(ipAddressStr.val()) + 20 /* max int len */
+ strlen(rtpmapLine) + strlen(trackId());
char* sdpLines = new char[sdpFmtSize];
sprintf(sdpLines, sdpFmt,
mediaType, // m= <media>
fPortNumForSDP, // m= <port>
rtpPayloadType, // m= <fmt list>
ipAddressStr.val(), // c= address
estBitrate, // b=AS:<bandwidth>
//rtpmapLine, // a=rtpmap:... (if present)
rtpPayloadType,
audio_codec == AC_MULAW ? "PCMU" : "PCMA",
audio_sample_rate,
audio_channels,
trackId()); // a=control:<track-id>
fSDPLines = sdpLines;
}
}
void BasicRTSPOnlySubsession::getStreamParameters(unsigned clientSessionId,
netAddressBits clientAddress,
Port const& clientRTPPort,
Port const& clientRTCPPort,
int tcpSocketNum,
unsigned char rtpChannelId,
unsigned char rtcpChannelId,
netAddressBits& destinationAddress,
u_int8_t& /*destinationTTL*/,
Boolean& isMulticast,
Port& serverRTPPort,
Port& serverRTCPPort,
void*& streamToken) {
netAddressBits clientAddress, Port const& clientRTPPort,
Port const& clientRTCPPort, int tcpSocketNum,
unsigned char rtpChannelId, unsigned char rtcpChannelId,
netAddressBits& destinationAddress, u_int8_t& /*destinationTTL*/,
Boolean& isMulticast, Port& serverRTPPort, Port& serverRTCPPort,
void*& streamToken) {
if(Vdestination == NULL && (avType == videoH264 || avType == avStdDyn || avType == avStd)){
if (fSDPLines == NULL){
setSDPLines();
}
if (destinationAddress == 0) {
destinationAddress = clientAddress;
}
struct in_addr destinationAddr;
destinationAddr.s_addr = destinationAddress;
Vdestination = new Destinations(destinationAddr, clientRTPPort,clientRTCPPort);
}
if(Adestination == NULL && (avType == audioPCMUdyn || avType == avStdDyn)){
if (fSDPLines == NULL){
setSDPLines();
}
if (destinationAddress == 0) {
destinationAddress = clientAddress;
}
struct in_addr destinationAddr;
destinationAddr.s_addr = destinationAddress;
Adestination = new Destinations(destinationAddr, clientRTPPort,clientRTCPPort);
}
Port rtp(rtp_port);
serverRTPPort = rtp;
Port rtcp(rtp_port + 1);
serverRTCPPort = rtcp;
if (Vdestination == NULL && (avType == video || avType == av)) {
if (fSDPLines == NULL) {
setSDPLines();
}
if (destinationAddress == 0) {
destinationAddress = clientAddress;
}
struct in_addr destinationAddr;
destinationAddr.s_addr = destinationAddress;
Vdestination = new Destinations(destinationAddr, clientRTPPort,
clientRTCPPort);
}
if (Adestination == NULL && (avType == audio || avType == av)) {
if (fSDPLines == NULL) {
setSDPLines();
}
if (destinationAddress == 0) {
destinationAddress = clientAddress;
}
struct in_addr destinationAddr;
destinationAddr.s_addr = destinationAddress;
Adestination = new Destinations(destinationAddr, clientRTPPort,
clientRTCPPort);
}
}
void BasicRTSPOnlySubsession::startStream(unsigned clientSessionId,
void* streamToken,
TaskFunc* rtcpRRHandler,
void* rtcpRRHandlerClientData,
unsigned short& rtpSeqNum,
unsigned& rtpTimestamp,
ServerRequestAlternativeByteHandler* serverRequestAlternativeByteHandler,
void* serverRequestAlternativeByteHandlerClientData) {
struct response *resp = NULL;
void* streamToken, TaskFunc* rtcpRRHandler,
void* rtcpRRHandlerClientData, unsigned short& rtpSeqNum,
unsigned& rtpTimestamp,
ServerRequestAlternativeByteHandler* serverRequestAlternativeByteHandler,
void* serverRequestAlternativeByteHandlerClientData) {
struct response *resp = NULL;
if (Vdestination != NULL){
if(avType == videoH264 || avType == avStdDyn || avType == avStd){
char pathV[1024];
if (Vdestination != NULL) {
if (avType == video || avType == av) {
char pathV[1024];
memset(pathV, 0, sizeof(pathV));
enum module_class path_sender[] = { MODULE_CLASS_SENDER, MODULE_CLASS_NONE };
append_message_path(pathV, sizeof(pathV), path_sender);
memset(pathV, 0, sizeof(pathV));
enum module_class path_sender[] = { MODULE_CLASS_SENDER,
MODULE_CLASS_NONE };
append_message_path(pathV, sizeof(pathV), path_sender);
//CHANGE DST PORT
struct msg_sender *msgV1 =
(struct msg_sender *)
new_message(sizeof(struct msg_sender));
msgV1->port = ntohs(Vdestination->rtpPort.num());
msgV1->type = SENDER_MSG_CHANGE_PORT;
resp = send_message(fmod, pathV, (struct message *) msgV1);
resp = NULL;
//CHANGE DST PORT
struct msg_sender *msgV1 = (struct msg_sender *) new_message(
sizeof(struct msg_sender));
msgV1->port = ntohs(Vdestination->rtpPort.num());
msgV1->type = SENDER_MSG_CHANGE_PORT;
resp = send_message(fmod, pathV, (struct message *) msgV1);
resp = NULL;
//CHANGE DST ADDRESS
struct msg_sender *msgV2 =
(struct msg_sender *)
new_message(sizeof(struct msg_sender));
strncpy(msgV2->receiver, inet_ntoa(Vdestination->addr), sizeof(msgV2->receiver) - 1);
msgV2->type = SENDER_MSG_CHANGE_RECEIVER;
//CHANGE DST ADDRESS
struct msg_sender *msgV2 = (struct msg_sender *) new_message(
sizeof(struct msg_sender));
strncpy(msgV2->receiver, inet_ntoa(Vdestination->addr),
sizeof(msgV2->receiver) - 1);
msgV2->type = SENDER_MSG_CHANGE_RECEIVER;
resp = send_message(fmod, pathV, (struct message *) msgV2);
resp = NULL;
}
}
resp = send_message(fmod, pathV, (struct message *) msgV2);
resp = NULL;
}
}
if(Adestination != NULL){
if(avType == audioPCMUdyn || avType == avStdDyn){
char pathA[1024];
if (Adestination != NULL) {
if (avType == audio || avType == av) {
char pathA[1024];
memset(pathA, 0, sizeof(pathA));
enum module_class path_sender[] = { MODULE_CLASS_AUDIO, MODULE_CLASS_SENDER, MODULE_CLASS_NONE };
append_message_path(pathA, sizeof(pathA), path_sender);
memset(pathA, 0, sizeof(pathA));
enum module_class path_sender[] = { MODULE_CLASS_AUDIO,
MODULE_CLASS_SENDER, MODULE_CLASS_NONE };
append_message_path(pathA, sizeof(pathA), path_sender);
//CHANGE DST PORT
struct msg_sender *msgA1 =
(struct msg_sender *)
new_message(sizeof(struct msg_sender));
msgA1->port = ntohs(Adestination->rtpPort.num());
msgA1->type = SENDER_MSG_CHANGE_PORT;
resp = send_message(fmod, pathA, (struct message *) msgA1);
resp = NULL;
//CHANGE DST PORT
struct msg_sender *msgA1 = (struct msg_sender *) new_message(
sizeof(struct msg_sender));
msgA1->port = ntohs(Adestination->rtpPort.num());
msgA1->type = SENDER_MSG_CHANGE_PORT;
resp = send_message(fmod, pathA, (struct message *) msgA1);
resp = NULL;
//CHANGE DST ADDRESS
struct msg_sender *msgA2 =
(struct msg_sender *)
new_message(sizeof(struct msg_sender));
strncpy(msgA2->receiver, inet_ntoa(Adestination->addr), sizeof(msgA2->receiver) - 1);
msgA2->type = SENDER_MSG_CHANGE_RECEIVER;
//CHANGE DST ADDRESS
struct msg_sender *msgA2 = (struct msg_sender *) new_message(
sizeof(struct msg_sender));
strncpy(msgA2->receiver, inet_ntoa(Adestination->addr),
sizeof(msgA2->receiver) - 1);
msgA2->type = SENDER_MSG_CHANGE_RECEIVER;
resp = send_message(fmod, pathA, (struct message *) msgA2);
resp = NULL;
}
}
resp = send_message(fmod, pathA, (struct message *) msgA2);
resp = NULL;
}
}
}
void BasicRTSPOnlySubsession::deleteStream(unsigned clientSessionId, void*& streamToken){
if (Vdestination != NULL){
if(avType == videoH264 || avType == avStdDyn || avType == avStd){
char pathV[1024];
Vdestination = NULL;
memset(pathV, 0, sizeof(pathV));
enum module_class path_sender[] = { MODULE_CLASS_SENDER, MODULE_CLASS_NONE };
append_message_path(pathV, sizeof(pathV), path_sender);
void BasicRTSPOnlySubsession::deleteStream(unsigned clientSessionId,
void*& streamToken) {
if (Vdestination != NULL) {
if (avType == video || avType == av) {
char pathV[1024];
Vdestination = NULL;
memset(pathV, 0, sizeof(pathV));
enum module_class path_sender[] = { MODULE_CLASS_SENDER,
MODULE_CLASS_NONE };
append_message_path(pathV, sizeof(pathV), path_sender);
//CHANGE DST PORT
struct msg_sender *msgV1 = (struct msg_sender *) new_message(
sizeof(struct msg_sender));
msgV1->port = 5004;
msgV1->type = SENDER_MSG_CHANGE_PORT;
send_message(fmod, pathV, (struct message *) msgV1);
//CHANGE DST PORT
struct msg_sender *msgV1 = (struct msg_sender *) new_message(
sizeof(struct msg_sender));
msgV1->port = rtp_port;
msgV1->type = SENDER_MSG_CHANGE_PORT;
send_message(fmod, pathV, (struct message *) msgV1);
//CHANGE DST ADDRESS
struct msg_sender *msgV2 = (struct msg_sender *) new_message(
sizeof(struct msg_sender));
strncpy(msgV2->receiver, "127.0.0.1",
sizeof(msgV2->receiver) - 1);
msgV2->type = SENDER_MSG_CHANGE_RECEIVER;
send_message(fmod, pathV, (struct message *) msgV2);
}
}
//CHANGE DST ADDRESS
struct msg_sender *msgV2 = (struct msg_sender *) new_message(
sizeof(struct msg_sender));
strncpy(msgV2->receiver, "127.0.0.1", sizeof(msgV2->receiver) - 1);
msgV2->type = SENDER_MSG_CHANGE_RECEIVER;
send_message(fmod, pathV, (struct message *) msgV2);
}
}
if(Adestination != NULL){
if(avType == audioPCMUdyn || avType == avStdDyn){
char pathA[1024];
Adestination = NULL;
memset(pathA, 0, sizeof(pathA));
enum module_class path_sender[] = { MODULE_CLASS_AUDIO, MODULE_CLASS_SENDER, MODULE_CLASS_NONE };
append_message_path(pathA, sizeof(pathA), path_sender);
if (Adestination != NULL) {
if (avType == audio || avType == av) {
char pathA[1024];
Adestination = NULL;
memset(pathA, 0, sizeof(pathA));
enum module_class path_sender[] = { MODULE_CLASS_AUDIO,
MODULE_CLASS_SENDER, MODULE_CLASS_NONE };
append_message_path(pathA, sizeof(pathA), path_sender);
//CHANGE DST PORT
struct msg_sender *msgA1 = (struct msg_sender *) new_message(
sizeof(struct msg_sender));
msgA1->port = 5006;
msgA1->type = SENDER_MSG_CHANGE_PORT;
send_message(fmod, pathA, (struct message *) msgA1);
//CHANGE DST PORT
struct msg_sender *msgA1 = (struct msg_sender *) new_message(
sizeof(struct msg_sender));
//CHANGE DST ADDRESS
struct msg_sender *msgA2 = (struct msg_sender *) new_message(
sizeof(struct msg_sender));
strncpy(msgA2->receiver, "127.0.0.1",
sizeof(msgA2->receiver) - 1);
msgA2->type = SENDER_MSG_CHANGE_RECEIVER;
send_message(fmod, pathA, (struct message *) msgA2);
}
}
//TODO: GET AUDIO PORT SET (NOT A COMMON CASE WHEN RTSP IS ENABLED: DEFAULT -> vport + 2)
msgA1->port = rtp_port + 2;
msgA1->type = SENDER_MSG_CHANGE_PORT;
send_message(fmod, pathA, (struct message *) msgA1);
//CHANGE DST ADDRESS
struct msg_sender *msgA2 = (struct msg_sender *) new_message(
sizeof(struct msg_sender));
strncpy(msgA2->receiver, "127.0.0.1", sizeof(msgA2->receiver) - 1);
msgA2->type = SENDER_MSG_CHANGE_RECEIVER;
send_message(fmod, pathA, (struct message *) msgA2);
}
}
}

View File

@@ -53,6 +53,7 @@ extern "C" {
#endif
#include "rtsp/rtsp_utils.h"
#include "audio/audio.h"
#include "module.h"
#include "control_socket.h"
@@ -92,12 +93,12 @@ public:
createNew(UsageEnvironment& env,
Boolean reuseFirstSource,
struct module *mod,
rtps_types_t avType);
rtps_types_t avType, audio_codec_t audio_codec, int audio_sample_rate, int audio_channels, int audio_bps, int rtp_port);
protected:
BasicRTSPOnlySubsession(UsageEnvironment& env, Boolean reuseFirstSource,
struct module *mod, rtps_types_t avType);
struct module *mod, rtps_types_t avType, audio_codec_t audio_codec, int audio_sample_rate, int audio_channels, int audio_bps, int rtp_port);
virtual ~BasicRTSPOnlySubsession();
@@ -141,6 +142,11 @@ private:
char fCNAME[100];
struct module *fmod;
rtps_types_t avType;
audio_codec_t audio_codec;
int audio_sample_rate;
int audio_channels;
int audio_bps;
int rtp_port; //server rtp port
};

View File

@@ -46,7 +46,7 @@
int c_start_server(rtsp_serv_t* server){
int ret;
BasicRTSPOnlyServer *srv = BasicRTSPOnlyServer::initInstance(server->port, server->mod, server->avType);
BasicRTSPOnlyServer *srv = BasicRTSPOnlyServer::initInstance(server->port, server->mod, server->avType, server->audio_codec, server->audio_sample_rate, server->audio_channels, server->audio_bps, server->rtp_port);
srv->init_server();
ret = pthread_create(&server->server_th, NULL, BasicRTSPOnlyServer::start_server, &server->watch);
if (ret == 0){
@@ -57,13 +57,18 @@ int c_start_server(rtsp_serv_t* server){
return ret;
}
rtsp_serv_t *init_rtsp_server(unsigned int port, struct module *mod, rtps_types_t avType){
rtsp_serv_t *init_rtsp_server(unsigned int port, struct module *mod, rtps_types_t avType, audio_codec_t audio_codec, int audio_sample_rate, int audio_channels, int audio_bps, int rtp_port){
rtsp_serv_t *server = (rtsp_serv_t*) malloc(sizeof(rtsp_serv_t));
server->port = port;
server->mod = mod;
server->watch = 0;
server->run = FALSE;
server->avType = avType;
server->audio_codec = audio_codec;
server->audio_sample_rate = audio_sample_rate;
server->audio_channels = audio_channels;
server->audio_bps = audio_bps;
server->rtp_port = rtp_port;
return server;
}
@@ -73,3 +78,33 @@ void c_stop_server(rtsp_serv_t* server){
pthread_join(server->server_th, NULL);
}
}
int get_rtsp_server_port(char *config){
int port;
char *tok;
char *save_ptr = NULL;
if(strcmp((strtok_r(config, ":", &save_ptr)),"port") == 0){
if ((tok = strtok_r(NULL, ":", &save_ptr))) {
port = atoi(tok);
if (!(port >= 0 && port <= 65535)) {
printf("\n[RTSP SERVER] ERROR - please, enter a valid port number.\n");
rtps_server_usage();
return -1;
} else return port;
} else {
printf("\n[RTSP SERVER] ERROR - please, enter a port number.\n");
rtps_server_usage();
return -1;
}
} else {
printf("\n[RTSP SERVER] ERROR - please, check usage.\n");
rtps_server_usage();
return -1;
}
}
void rtps_server_usage(){
printf("\n[RTSP SERVER] usage:\n");
printf("\t--rtsp-server[=port:number]\n");
printf("\t\tdefault rtsp server port number: 8554\n\n");
}

View File

@@ -60,6 +60,7 @@ extern "C" {
#include "module.h"
#include "debug.h"
#include "rtsp/rtsp_utils.h"
#include "audio/audio.h"
#ifdef __cplusplus
}
@@ -79,13 +80,22 @@ EXTERNC typedef struct rtsp_serv {
uint8_t watch;
uint8_t run;
rtps_types_t avType;
audio_codec_t audio_codec;
int audio_sample_rate;
int audio_channels;
int audio_bps;
int rtp_port; //server rtp port
} rtsp_serv_t;
EXTERNC int c_start_server(rtsp_serv_t* server);
EXTERNC void c_stop_server(rtsp_serv_t* server);
EXTERNC rtsp_serv_t* init_rtsp_server(unsigned int port, struct module *mod, rtps_types_t avType);
EXTERNC rtsp_serv_t* init_rtsp_server(unsigned int port, struct module *mod, rtps_types_t avType, audio_codec_t audio_codec, int audio_sample_rate, int audio_channels, int audio_bps, int rtp_port);
EXTERNC void rtps_server_usage();
EXTERNC int get_rtsp_server_port(char *config);
#undef EXTERNC

View File

@@ -3,13 +3,9 @@
typedef enum {
none,
avStd,
avStdDyn,
avUG,
videoH264,
videoUG,
audioPCMUstd,
audioPCMUdyn,
av,
video,
audio,
NUM_RTSP_FORMATS
}rtps_types_t;

View File

@@ -73,6 +73,7 @@
#include "rtp/fec.h"
#include "rtp/rtp.h"
#include "rtp/rtp_callback.h"
#include "rtp/rtpenc_h264.h"
#include "tv.h"
#include "transmit.h"
#include "video.h"
@@ -97,19 +98,11 @@
#define GET_DELTA delta = (long)((double)(stop.QuadPart - start.QuadPart) * 1000 * 1000 * 1000 / freq.QuadPart);
#endif
#define RTPENC_H264_MAX_NALS 1024*2*2
#define RTPENC_H264_PT 96
// Mulaw audio memory reservation
#define BUFFER_MTU_SIZE 1500
static char *data_buffer_mulaw;
static int buffer_mulaw_init = 0;
struct rtp_nal_t {
uint8_t *data;
int size;
};
static void tx_update(struct tx *tx, struct video_frame *frame, int substream);
static void tx_done(struct module *tx);
static uint32_t format_interl_fps_hdr_row(enum interlacing_t interlacing, double input_fps);
@@ -149,6 +142,8 @@ struct tx {
struct openssl_encrypt *encryption;
long packet_rate;
struct rtpenc_h264_state *rtpenc_h264_state;
};
// Mulaw audio memory reservation
@@ -230,11 +225,12 @@ struct tx *tx_init(struct module *parent, unsigned mtu, enum tx_media_type media
}
}
tx->packet_rate = packet_rate;
platform_spin_init(&tx->spin);
}
return tx;
tx->packet_rate = packet_rate;
tx->rtpenc_h264_state = rtpenc_h264_init_state();
platform_spin_init(&tx->spin);
}
return tx;
}
struct tx *tx_init_h264(struct module *parent, unsigned mtu, enum tx_media_type media_type,
@@ -756,371 +752,225 @@ void audio_tx_send(struct tx* tx, struct rtp *rtp_session, audio_frame2 * buffer
}
/*
* audio_tx_send_mulaw - Send interleaved channels from the audio_frame2 at 1 bps,
* as the mulaw standard.
* audio_tx_send_standard - Send interleaved channels from the audio_frame2,
* as the mulaw and A-law standards (dynamic or std PT).
*/
void audio_tx_send_mulaw(struct tx* tx, struct rtp *rtp_session, audio_frame2 * buffer)
{
//TODO to be more abstract in order to accept A-law too and other supported standards with such implementation
assert(buffer->codec == AC_MULAW);
void audio_tx_send_standard(struct tx* tx, struct rtp *rtp_session,
audio_frame2 * buffer) {
//TODO to be more abstract in order to accept A-law too and other supported standards with such implementation
assert(buffer->codec == AC_MULAW || buffer->codec == AC_ALAW);
int pt;
uint32_t timestamp;
int pt;
uint32_t ts;
static uint32_t ts_prev = 0;
struct timeval curr_time;
platform_spin_lock(&tx->spin);
platform_spin_lock(&tx->spin);
// Configure the right Payload type,
// 8000 Hz, 1 channel and 2 bps is the ITU-T G.711 standard (should be 1 bps...)
// Other channels or Hz goes to DynRTP-Type97
if (buffer->ch_count == 1 && buffer->sample_rate == 8000) {
if (buffer->codec == AC_MULAW)
pt = PT_ITU_T_G711_PCMU;
if (buffer->codec == AC_ALAW)
pt = PT_ITU_T_G711_PCMA;
} else {
pt = PT_DynRTP_Type97;
}
// Configure the right Payload type,
// 8000 Hz, 1 channel is the ITU-T G.711 standard
// More channels or Hz goes to DynRTP-Type97
// The sizes for the different audio_frame2 channels must be the same.
for (int i = 1; i < buffer->ch_count; i++)
assert(buffer->data_len[0] == buffer->data_len[i]);
if (buffer->ch_count == 1 && buffer->sample_rate == 8000) {
pt = PT_ITU_T_G711_PCMU;
} else {
pt = PT_DynRTP_Type97;
}
int data_len = buffer->data_len[0] * buffer->ch_count; /* Number of samples to send */
int payload_size = tx->mtu - 40; /* Max size of an RTP payload field */
// The sizes for the different audio_frame2 channels must be the same.
for (int i = 1 ; i < buffer->ch_count ; i++) assert(buffer->data_len[0] == buffer->data_len[i]);
init_tx_mulaw_buffer();
char *curr_sample = data_buffer_mulaw;
int ch, pos = 0, count = 0, pointerToSend = 0;
int data_len = buffer->data_len[0] * buffer->ch_count; /* Number of samples to send */
int data_remainig = data_len;
int payload_size = tx->mtu - 40; /* Max size of an RTP payload field */
do {
for (ch = 0; ch < buffer->ch_count; ch++) {
memcpy(curr_sample, buffer->data[ch] + pos,
buffer->bps * sizeof(char));
curr_sample += buffer->bps * sizeof(char);
count += buffer->bps * sizeof(char);
}
pos += buffer->bps * sizeof(char);
init_tx_mulaw_buffer();
char *curr_sample = data_buffer_mulaw;
if ((pos * buffer->ch_count) % payload_size == 0) {
// Update first sample timestamp
ts = get_std_audio_local_mediatime((double)payload_size / (double)buffer->ch_count);
gettimeofday(&curr_time, NULL);
rtp_send_ctrl(rtp_session, ts_prev, 0, curr_time); //send RTCP SR
ts_prev = ts;
// Send the packet
rtp_send_data(rtp_session, ts, pt, 0, 0, /* contributing sources */
0, /* contributing sources length */
data_buffer_mulaw + pointerToSend, payload_size, 0, 0, 0);
pointerToSend += payload_size;
}
} while (count < data_len);
int ch, pos = 0, count = 0, pointerToSend = 0;
if ((pos * buffer->ch_count) % payload_size != 0) {
// Update first sample timestamp
ts = get_std_audio_local_mediatime((double)((pos * buffer->ch_count) % payload_size) / (double)buffer->ch_count);
gettimeofday(&curr_time, NULL);
rtp_send_ctrl(rtp_session, ts_prev, 0, curr_time); //send RTCP SR
ts_prev = ts;
// Send the packet
rtp_send_data(rtp_session, ts, pt, 0, 0, /* contributing sources */
0, /* contributing sources length */
data_buffer_mulaw + pointerToSend,
(pos * buffer->ch_count) % payload_size, 0, 0, 0);
}
do{
for(ch = 0; ch < buffer->ch_count; ch++){
memcpy(curr_sample, buffer->data[ch] + pos, buffer->bps * sizeof(char));
curr_sample += buffer->bps * sizeof(char);
count+=buffer->bps * sizeof(char);
data_remainig--;
}
pos += buffer->bps * sizeof(char);
if((pos * buffer->ch_count) % payload_size == 0){
// Update first sample timestamp
timestamp = get_std_audio_local_mediatime((buffer->data_len[0] - (data_remainig/(buffer->bps * buffer->ch_count))));
// Send the packet
rtp_send_data(rtp_session, timestamp, pt, 0, 0, /* contributing sources */
0, /* contributing sources length */
data_buffer_mulaw + pointerToSend, payload_size,
0, 0, 0);
pointerToSend += payload_size;
}
}while(count < data_len);
if((pos * buffer->ch_count) % payload_size != 0){
// Update first sample timestamp
timestamp = get_std_audio_local_mediatime((buffer->data_len[0] - (data_remainig/(buffer->bps * buffer->ch_count))));
// Send the packet
rtp_send_data(rtp_session, timestamp, pt, 0, 0, /* contributing sources */
0, /* contributing sources length */
data_buffer_mulaw + pointerToSend , (pos * buffer->ch_count) % payload_size,
0, 0, 0);
}
tx->buffer ++;
platform_spin_unlock(&tx->spin);
platform_spin_unlock(&tx->spin);
}
static uint8_t *rtpenc_h264_find_startcode_internal(uint8_t *start,
uint8_t *end);
uint8_t *rtpenc_h264_find_startcode(uint8_t *p, uint8_t *end);
int rtpenc_h264_parse_nal_units(uint8_t *buf_in, int size,
struct rtp_nal_t *nals, int *nnals);
/**
* H.264 standard transmission
*/
static void tx_send_base_h264(struct tx *tx, struct video_frame *frame,
struct rtp *rtp_session, uint32_t ts, int send_m, codec_t color_spec,
double input_fps, enum interlacing_t interlacing,
unsigned int substream, int fragment_offset) {
static uint8_t *rtpenc_h264_find_startcode_internal(uint8_t *start,
uint8_t *end)
{
//uint8_t *p = start;
//uint8_t *pend = end; // - 3; // XXX: w/o -3, p[1] and p[2] may fail.
UNUSED(color_spec);
UNUSED(input_fps);
UNUSED(interlacing);
UNUSED(fragment_offset);
UNUSED(send_m);
assert(tx->magic == TRANSMIT_MAGIC);
for (uint8_t *p = start; p < end; p++) {
if (p[0] == 0 && p[1] == 0 && p[2] == 1) {
return p;
}
}
struct tile *tile = &frame->tiles[substream];
return (uint8_t *) NULL;
}
char pt = RTPENC_H264_PT;
unsigned char hdr[2];
int cc = 0;
uint32_t csrc = 0;
int m = 0;
char *extn = 0;
uint16_t extn_len = 0;
uint16_t extn_type = 0;
unsigned nalsize = 0;
uint8_t *data = (uint8_t *) tile->data;
int data_len = tile->data_len;
tx->rtpenc_h264_state->maxPacketSize = tx->mtu - 40;
tx->rtpenc_h264_state->haveSeenEOF = false;
tx->rtpenc_h264_state->haveSeenFirstStartCode = false;
uint8_t *rtpenc_h264_find_startcode(uint8_t *p, uint8_t *end)
{
uint8_t *out = rtpenc_h264_find_startcode_internal(p, end);
if (out != NULL) {
if (p < out && out < end && !out[-1]) {
out--;
}
} else {
debug_msg("No NAL start code found\n"); // It's not an error per se.
}
return out;
}
while ((nalsize = rtpenc_h264_frame_parse(tx->rtpenc_h264_state, data, data_len)) > 0) {
int rtpenc_h264_parse_nal_units(uint8_t *buf_in, int size,
struct rtp_nal_t *nals, int *nnals)
{
uint8_t *p = buf_in;
uint8_t *end = p + size;
uint8_t *nal_start;
uint8_t *nal_end = NULL;
tx->rtpenc_h264_state->curNALOffset = 0;
tx->rtpenc_h264_state->lastNALUnitFragment = false; // by default
size = 0;
*nnals = 0;
// TODO: control error
nal_start = rtpenc_h264_find_startcode(p, end);
for (;;) {
if (nal_start == end || nal_start == NULL) {
break;
}
while(!tx->rtpenc_h264_state->lastNALUnitFragment){
// We have NAL unit data in the buffer. There are three cases to consider:
// 1. There is a new NAL unit in the buffer, and it's small enough to deliver
// to the RTP sink (as is).
// 2. There is a new NAL unit in the buffer, but it's too large to deliver to
// the RTP sink in its entirety. Deliver the first fragment of this data,
// as a FU packet, with one extra preceding header byte (for the "FU header").
// 3. There is a NAL unit in the buffer, and we've already delivered some
// fragment(s) of this. Deliver the next fragment of this data,
// as a FU packet, with two (H.264) extra preceding header bytes
// (for the "NAL header" and the "FU header").
if (tx->rtpenc_h264_state->curNALOffset == 0) { // case 1 or 2
if (nalsize <= tx->rtpenc_h264_state->maxPacketSize) { // case 1
nal_end = rtpenc_h264_find_startcode(nal_start + 3, end);
if (nal_end == NULL) {
nal_end = end;
}
int nal_size = nal_end - nal_start;
if (tx->rtpenc_h264_state->haveSeenEOF) m = 1;
if (rtp_send_data(rtp_session, ts, pt, m, cc, &csrc,
(char *) tx->rtpenc_h264_state->from, nalsize,
extn, extn_len, extn_type) < 0) {
error_msg("There was a problem sending the RTP packet\n");
}
tx->rtpenc_h264_state->lastNALUnitFragment = true;
} else { // case 2
// We need to send the NAL unit data as FU packets. Deliver the first
// packet now. Note that we add "NAL header" and "FU header" bytes to the front
// of the packet (overwriting the existing "NAL header").
hdr[0] = (tx->rtpenc_h264_state->firstByteOfNALUnit & 0xE0) | 28; //FU indicator
hdr[1] = 0x80 | (tx->rtpenc_h264_state->firstByteOfNALUnit & 0x1F); // FU header (with S bit)
if(nal_size > 4){
size += nal_size;
nals[(*nnals)].data = nal_start;
nals[(*nnals)].size = nal_size;
(*nnals)++;
if (rtp_send_data_hdr(rtp_session, ts, pt, m, cc, &csrc,
(char *) hdr, 2,
(char *) tx->rtpenc_h264_state->from + 1, tx->rtpenc_h264_state->maxPacketSize - 2,
extn, extn_len, extn_type) < 0) {
error_msg("There was a problem sending the RTP packet\n");
}
tx->rtpenc_h264_state->curNALOffset += tx->rtpenc_h264_state->maxPacketSize - 1;
tx->rtpenc_h264_state->lastNALUnitFragment = false;
nalsize -= tx->rtpenc_h264_state->maxPacketSize - 1;
}
} else { // case 3
// We are sending this NAL unit data as FU packets. We've already sent the
// first packet (fragment). Now, send the next fragment. Note that we add
// "NAL header" and "FU header" bytes to the front. (We reuse these bytes that
// we already sent for the first fragment, but clear the S bit, and add the E
// bit if this is the last fragment.)
hdr[1] = hdr[1] & ~0x80;// FU header (no S bit)
nal_start = nal_end;
}else nal_start += 3;
}
return size;
}
if (nalsize + 1 > tx->rtpenc_h264_state->maxPacketSize) {
// We can't send all of the remaining data this time:
if (rtp_send_data_hdr(rtp_session, ts, pt, m, cc, &csrc,
(char *) hdr, 2,
(char *) tx->rtpenc_h264_state->from + tx->rtpenc_h264_state->curNALOffset,
tx->rtpenc_h264_state->maxPacketSize - 2, extn, extn_len,
extn_type) < 0) {
error_msg("There was a problem sending the RTP packet\n");
}
tx->rtpenc_h264_state->curNALOffset += tx->rtpenc_h264_state->maxPacketSize - 2;
tx->rtpenc_h264_state->lastNALUnitFragment = false;
nalsize -= tx->rtpenc_h264_state->maxPacketSize - 2;
static void tx_send_base_h264(struct tx *tx, struct tile *tile, struct rtp *rtp_session, uint32_t ts,
int send_m, codec_t color_spec, double input_fps,
enum interlacing_t interlacing, unsigned int substream,
int fragment_offset)
{
} else {
// This is the last fragment:
if (tx->rtpenc_h264_state->haveSeenEOF) m = 1;
UNUSED(color_spec);
UNUSED(input_fps);
UNUSED(interlacing);
UNUSED(substream);
UNUSED(fragment_offset);
hdr[1] |= 0x40;// set the E bit in the FU header
assert(tx->magic == TRANSMIT_MAGIC);
//tx_update(tx, tile);
if (rtp_send_data_hdr(rtp_session, ts, pt, m, cc, &csrc,
(char *) hdr, 2,
(char *) tx->rtpenc_h264_state->from + tx->rtpenc_h264_state->curNALOffset,
nalsize, extn, extn_len, extn_type) < 0) {
error_msg("There was a problem sending the RTP packet\n");
}
tx->rtpenc_h264_state->lastNALUnitFragment = true;
}
}
}
uint8_t *data = (uint8_t *) tile->data;
int data_len = tile->data_len;
struct rtp_nal_t nals[RTPENC_H264_MAX_NALS];
int nnals = 0;
rtpenc_h264_parse_nal_units(data, data_len, nals, &nnals);
debug_msg("%d NAL units found in buffer\n", nnals);
char pt = RTPENC_H264_PT;
int cc = 0;
uint32_t csrc = 0;
char *extn = 0;
uint16_t extn_len = 0;
uint16_t extn_type = 0;
int i;
for (i = 0; i < nnals; i++) {
struct rtp_nal_t nal = nals[i];
int fragmentation = 0;
int nal_max_size = tx->mtu - 40;
if (nal.size > nal_max_size) {
debug_msg("RTP packet size exceeds the MTU size\n");
fragmentation = 1;
}
uint8_t *nal_header = nal.data;
// skip startcode
int startcode_size = 0;
uint8_t *p = nal_header;
while ((*(p++)) == (uint8_t)0) {
startcode_size++;
}
startcode_size++;
nal_header += startcode_size;
int nal_header_size = 1;
uint8_t *nal_payload = nal.data + nal_header_size + startcode_size; // nal.data + nal_header_size;
int nal_payload_size = nal.size - (int)(nal_header_size + startcode_size); //nal.size - nal_header_size;
const char type = (char) (*nal_header & 0x1f);
const char nri = (char) ((*nal_header & 0x60) >> 5);
debug_msg("NAL recv | %d bytes | header: %d %d %d %d %d %d %d %d | type: %d | NRI: %d\n",
nal_header_size + nal_payload_size,
((*nal_header) & 0x80) >> 7, ((*nal_header) & 0x40) >> 6,
((*nal_header) & 0x20) >> 5, ((*nal_header) & 0x10) >> 4,
((*nal_header) & 0x08) >> 3, ((*nal_header) & 0x04) >> 2,
((*nal_header) & 0x02) >> 1, ((*nal_header) & 0x01),
(int)(*nal_header & 0x1f), (int)((*nal_header & 0x60) >> 5));
char info_type;
if (type >= 1 && type <= 23) {
info_type = 1;
} else {
info_type = type;
}
switch (info_type) {
case 0:
case 1:
debug_msg("Unfragmented or reconstructed NAL type\n");
break;
default:
error_msg("Non expected NAL type %d\n", (int)info_type);
return; // TODO maybe just warn and don't fail?
break;
}
int m = 0;
if (!fragmentation) {
if (i == nnals - 1) {
m = send_m;
debug_msg("NAL with M bit\n");
}
int err = rtp_send_data_hdr(rtp_session, ts, pt, m, cc, &csrc,
(char *)nal_header, nal_header_size,
(char *)nal_payload, nal_payload_size, extn, extn_len,
extn_type);
/*unsigned char *dst = (unsigned char *)(nal.data);
unsigned char *end = (unsigned char *)(nal.data + nal.size);
debug_msg("\n\nFirst four bytes: %02x %02x %02x %02x\n", dst[0], dst[1], dst[2], dst[3]);
debug_msg("Last four bytes: %02x %02x %02x %02x\n",
end[-4],
end[-3],
end[-2],
end[-1]);
debug_msg("NAL size: %d\n\n", nal.size); // - startcode_size); */
if (err < 0) {
error_msg("There was a problem sending the RTP packet\n");
}
else {
debug_msg("NAL sent | %d bytes | header: %d %d %d %d %d %d %d %d | type: %d | NRI: %d\n",
nal_payload_size + nal_header_size,
((*nal_header) & 0x80) >> 7, ((*nal_header) & 0x40) >> 6,
((*nal_header) & 0x20) >> 5, ((*nal_header) & 0x10) >> 4,
((*nal_header) & 0x08) >> 3, ((*nal_header) & 0x04) >> 2,
((*nal_header) & 0x02) >> 1, ((*nal_header) & 0x01),
(int)(*nal_header & 0x1f), (int)((*nal_header & 0x60) >> 5));
}
}
else {
uint8_t frag_header[2];
int frag_header_size = 2;
frag_header[0] = 28 | (nri << 5); // fu_indicator, new type, same nri
frag_header[1] = type | (1 << 7);// start, initial fu_header
uint8_t *frag_payload = nal_payload;
int frag_payload_size = nal_max_size - frag_header_size;
int remaining_payload_size = nal_payload_size;
while (remaining_payload_size + 2 > nal_max_size) {
debug_msg("NAL 1st 6 payload bytes: %x %x %x %x %x %x\n",
(unsigned char)frag_payload[0], (unsigned char)frag_payload[1],
(unsigned char)frag_payload[2], (unsigned char)frag_payload[3],
(unsigned char)frag_payload[4], (unsigned char)frag_payload[5]);
int err = rtp_send_data_hdr(rtp_session, ts, pt, m, cc, &csrc,
(char *)frag_header, frag_header_size,
(char *)frag_payload, frag_payload_size, extn, extn_len,
extn_type);
if (err < 0) {
error_msg("There was a problem sending the RTP packet\n");
}
else {
debug_msg("NAL fragment send | %d bytes | flag %d\n", frag_payload_size + frag_header_size, (int)((frag_header[1] & 0xE0) >> 5)); //flag: 0 -> O , 2 -> E , 4 -> S , other -> !
}
remaining_payload_size -= frag_payload_size;
frag_payload += frag_payload_size;
frag_header[1] = type;
}
if (i == nnals - 1) {
m = send_m;
debug_msg("NAL fragment (E) with M bit\n");
}
frag_header[1] = type | (1 << 6); // end
debug_msg("NAL 1st 6 payload bytes: %x %x %x %x %x %x\n",
(unsigned char)frag_payload[0], (unsigned char)frag_payload[1],
(unsigned char)frag_payload[2], (unsigned char)frag_payload[3],
(unsigned char)frag_payload[4], (unsigned char)frag_payload[5]);
int err = rtp_send_data_hdr(rtp_session, ts, pt, m, cc, &csrc,
(char *)frag_header, frag_header_size,
(char *)frag_payload, remaining_payload_size, extn, extn_len,
extn_type);
if (err < 0) {
error_msg("There was a problem sending the RTP packet\n");
}
else {
debug_msg("NAL fragment send | %d bytes | flag %d\n", remaining_payload_size + frag_header_size, (int)((frag_header[1] & 0xE0) >> 5)); //flag: 0 -> O , 2 -> E , 4 -> S , other -> !
}
}
}
if (tx->rtpenc_h264_state->haveSeenEOF){
return;
}
}
}
/*
* sends one or more frames (tiles) with same TS in one RTP stream. Only one m-bit is set.
*/
void
tx_send_h264(struct tx *tx, struct video_frame *frame, struct rtp *rtp_session)
{
unsigned int i;
uint32_t ts = 0;
void tx_send_h264(struct tx *tx, struct video_frame *frame,
struct rtp *rtp_session) {
unsigned int i;
struct timeval curr_time;
static uint32_t ts_prev = 0;
uint32_t ts = 0;
assert(!frame->fragment || tx->fec_scheme == FEC_NONE); // currently no support for FEC with fragments
assert(!frame->fragment || frame->tile_count); // multiple tile are not currently supported for fragmented send
assert(!frame->fragment || tx->fec_scheme == FEC_NONE); // currently no support for FEC with fragments
assert(!frame->fragment || frame->tile_count); // multiple tiles are not currently supported for fragmented send
platform_spin_lock(&tx->spin);
platform_spin_lock(&tx->spin);
ts = get_local_mediatime();
if(frame->fragment &&
tx->last_frame_fragment_id == frame->frame_fragment_id) {
ts = tx->last_ts;
} else {
tx->last_frame_fragment_id = frame->frame_fragment_id;
tx->last_ts = ts;
}
ts = get_std_video_local_mediatime(frame->fps);
for(i = 0; i < frame->tile_count; ++i)
{
int last = FALSE;
int fragment_offset = 0;
gettimeofday(&curr_time, NULL);
rtp_send_ctrl(rtp_session, ts_prev, 0, curr_time); //send RTCP SR
ts_prev = ts;
if (i == frame->tile_count - 1) {
if(!frame->fragment || frame->last_fragment)
last = TRUE;
}
if(frame->fragment)
fragment_offset = vf_get_tile(frame, i)->offset;
tx_send_base_h264(tx, frame, rtp_session, ts, 0,
frame->color_spec, frame->fps, frame->interlacing, i,
0);
tx_send_base_h264(tx, vf_get_tile(frame, i), rtp_session, ts, last,
frame->color_spec, frame->fps, frame->interlacing,
i, fragment_offset);
tx->buffer ++;
}
platform_spin_unlock(&tx->spin);
platform_spin_unlock(&tx->spin);
}

View File

@@ -58,6 +58,7 @@
#define TRANSMIT_H_
#include "audio/audio.h"
#include "rtp/rtpenc_h264.h"
#include "types.h"
#ifdef __cplusplus
@@ -74,14 +75,12 @@ struct tx *tx_init(struct module *parent, unsigned mtu, enum tx_media_type media
void tx_send_tile(struct tx *tx_session, struct video_frame *frame, int pos, struct rtp *rtp_session);
void tx_send(struct tx *tx_session, struct video_frame *frame, struct rtp *rtp_session);
void audio_tx_send(struct tx *tx_session, struct rtp *rtp_session, audio_frame2 *buffer);
void audio_tx_send_mulaw(struct tx* tx, struct rtp *rtp_session, audio_frame2 * buffer);
void audio_tx_send_standard(struct tx* tx, struct rtp *rtp_session, audio_frame2 * buffer);
void format_video_header(struct video_frame *frame, int tile_idx, int buffer_idx,
uint32_t *hdr);
struct tx *tx_init_h264(struct module *parent, unsigned mtu, enum tx_media_type media_type,
const char *fec, const char *encryption, long packet_rate);
void tx_send_h264(struct tx *tx_session, struct video_frame *frame, struct rtp *rtp_session);
#ifdef __cplusplus

View File

@@ -134,28 +134,61 @@ int tv_gt(struct timeval a, struct timeval b)
/*
* STANDARD TRANSPORT - RTP STANDARD
* Calculate initial time on first execution, add per sample time otherwise.
* Calculate initial time on first execution, add per 'sample' time otherwise.
*/
uint32_t get_std_audio_local_mediatime(int samples)
//shared struct for audio and video streams (sync.)
typedef struct {
bool init;
uint32_t random_startime_offset;
struct timeval vtime;
struct timeval atime;
struct timeval start_time;
} std_time_struct;
std_time_struct start_time = { true, 0, 0 };
uint32_t get_std_audio_local_mediatime(double samples)
{
static uint32_t saved_timestamp;
static int first = 0;
if (start_time.init) {
gettimeofday(&start_time.start_time, NULL);
start_time.atime = start_time.start_time;
start_time.vtime = start_time.start_time;
start_time.random_startime_offset = lbl_random();
tv_add_usec(&start_time.vtime, start_time.random_startime_offset);
tv_add_usec(&start_time.atime, start_time.random_startime_offset);
uint32_t curr_timestamp;
if (first == 0) {
struct timeval start_time;
gettimeofday(&start_time, NULL);
curr_timestamp = start_time.tv_sec +
(start_time.tv_usec / 1000000.0) +
lbl_random();
first = 1;
start_time.init = false;
}
else {
curr_timestamp = saved_timestamp;
tv_add(&start_time.atime, samples);
}
saved_timestamp = curr_timestamp + samples;
return curr_timestamp;
return (double)start_time.atime.tv_sec + (((double)start_time.atime.tv_usec) / 1000000.0);
}
uint32_t get_std_video_local_mediatime(double framerate)
{
double vrate = 90000; //default and standard video sample rate (Hz)
double nextFraction;
unsigned nextSecsIncrement;
if (start_time.init) {
gettimeofday(&start_time.start_time, NULL);
start_time.atime = start_time.start_time;
start_time.vtime = start_time.start_time;
start_time.random_startime_offset = lbl_random();
tv_add_usec(&start_time.vtime, start_time.random_startime_offset);
tv_add_usec(&start_time.atime, start_time.random_startime_offset);
start_time.init = false;
}
else {
nextFraction = ( start_time.vtime.tv_usec / 1000000.0 ) + ( 1 / framerate );
nextSecsIncrement = (long) nextFraction;
start_time.vtime.tv_sec += (long) nextSecsIncrement;
start_time.vtime.tv_usec = (long) ((nextFraction - nextSecsIncrement) * 1000000);
}
return ((double)start_time.vtime.tv_sec + (((double)start_time.vtime.tv_usec) / 1000000.0)) * vrate;
}

View File

@@ -54,7 +54,8 @@ uint32_t tv_diff_usec(struct timeval curr_time, struct timeval prev_time);
void tv_add(struct timeval *ts, double offset_secs);
void tv_add_usec(struct timeval *ts, double offset);
int tv_gt(struct timeval a, struct timeval b);
uint32_t get_std_audio_local_mediatime(int samples);
uint32_t get_std_audio_local_mediatime(double samples);
uint32_t get_std_video_local_mediatime(double framerate);
#ifdef __cplusplus
}

View File

@@ -773,7 +773,7 @@ static void setparam_h264(AVCodecContext *codec_ctx, struct setparam_param *para
// percent of CPU.
strncat(params, "no-8x8dct=1:b-adapt=0:bframes=0:no-cabac=1:"
"no-deblock=1:no-mbtree=1:me=dia:no-mixed-refs=1:partitions=none:"
"rc-lookahead=0:ref=1:scenecut=0:subme=0:trellis=0:slice-max-size=50000",
"rc-lookahead=0:ref=1:scenecut=0:subme=0:trellis=0",
sizeof(params) - strlen(params) - 1);
}

View File

@@ -46,6 +46,7 @@
#endif // HAVE_CONFIG_H
#include "transmit.h"
#include "rtp/rtpenc_h264.h"
#include "video_rxtx/h264_rtp.h"
#include "video.h"
@@ -53,13 +54,13 @@ h264_rtp_video_rxtx::h264_rtp_video_rxtx(struct module *parent, struct video_exp
const char *requested_compression, const char *requested_encryption,
const char *receiver, int rx_port, int tx_port,
bool use_ipv6, const char *mcast_if, const char *requested_video_fec, int mtu,
long packet_rate, rtps_types_t avType) :
long packet_rate, rtps_types_t avType, audio_codec_t audio_codec, int audio_sample_rate, int audio_channels, int audio_bps, int rtsp_port) :
rtp_video_rxtx(parent, video_exporter, requested_compression, requested_encryption,
receiver, rx_port, tx_port,
use_ipv6, mcast_if, requested_video_fec, mtu, packet_rate)
{
#ifdef HAVE_RTSP_SERVER
m_rtsp_server = init_rtsp_server(0, parent, avType); //port, root_module, avType
m_rtsp_server = init_rtsp_server(rtsp_port, parent, avType, audio_codec, audio_sample_rate, audio_channels, audio_bps, rx_port);
c_start_server(m_rtsp_server);
#endif
}

View File

@@ -52,7 +52,7 @@ public:
const char *requested_compression, const char *requested_encryption,
const char *receiver, int rx_port, int tx_port,
bool use_ipv6, const char *mcast_if, const char *requested_video_fec, int mtu,
long packet_rate, rtps_types_t avType);
long packet_rate, rtps_types_t avType, audio_codec_t audio_codec, int audio_sample_rate, int audio_channels, int audio_bps, int rtsp_port);
virtual ~h264_rtp_video_rxtx();
private:
virtual void send_frame(struct video_frame *);

View File

@@ -87,6 +87,12 @@ void rtp_video_rxtx::process_message(struct msg_sender *msg)
fprintf(stderr, "Changing receiver to: %s failed!\n",
msg->receiver);
}
if (rtcp_change_dest(m_network_devices[0],
msg->receiver) == FALSE){
fprintf(stderr, "Changing rtcp receiver to: %s failed!\n",
msg->receiver);
}
break;
case SENDER_MSG_CHANGE_PORT:
change_tx_port(msg->port);