Merge remote-tracking branch 'seth/trunk' into trunk

This commit is contained in:
Martin Pulec
2014-05-12 11:37:37 +02:00
34 changed files with 4449 additions and 1811 deletions

View File

@@ -543,10 +543,56 @@ static void *audio_receiver_thread(void *arg)
}
pdb_iter_done(&it);
}else if(s->receiver == NET_STANDARD){
//TODO receive mulaw standard RTP (decode frame mulaw callback)
//TODO now expecting to receive mulaw standard RTP (decode frame mulaw callback) , next steps, to be dynamic...
gettimeofday(&curr_time, NULL);
ts = tv_diff(curr_time, s->start_time) * 90000;
rtp_update(s->audio_network_device, curr_time);
rtp_send_ctrl(s->audio_network_device, ts, 0, curr_time);
timeout.tv_sec = 0;
timeout.tv_usec = 999999 / 59.94; /* audio goes almost always at the same rate
as video frames */
rtp_recv_r(s->audio_network_device, &timeout, ts);
pdb_iter_t it;
cp = pdb_iter_init(s->audio_participants, &it);
while (cp != NULL) {
if (audio_pbuf_decode(cp->playout_buffer, curr_time, decode_audio_frame_mulaw, &pbuf_data)) {
bool failed = false;
if(s->echo_state) {
#ifdef HAVE_SPEEX
echo_play(s->echo_state, &pbuf_data.buffer);
#endif
}
struct audio_desc curr_desc;
curr_desc = audio_desc_from_audio_frame(&pbuf_data.buffer);
if(!audio_desc_eq(device_desc, curr_desc)) {
if(audio_reconfigure(s, curr_desc.bps * 8,
curr_desc.ch_count,
curr_desc.sample_rate) != TRUE) {
fprintf(stderr, "Audio reconfiguration failed!");
failed = true;
}
else {
fprintf(stderr, "Audio reconfiguration succeeded.");
device_desc = curr_desc;
rtp_flush_recv_buf(s->audio_network_device);
}
fprintf(stderr, " (%d channels, %d bps, %d Hz)\n",
curr_desc.ch_count,
curr_desc.bps, curr_desc.sample_rate);
}
if(!failed)
audio_playback_put_frame(s->audio_playback_device, &pbuf_data.buffer);
}
pbuf_remove(cp->playout_buffer, curr_time);
cp = pdb_iter_next(&it);
}
pdb_iter_done(&it);
}else { /* NET_JACK */
#ifdef HAVE_JACK_TRANS
decoded = jack_receive(s->jack_connection, &pbuf_data);
@@ -752,7 +798,6 @@ static void *audio_sender_thread(void *arg)
}
}
}else if(s->sender == NET_STANDARD){
//TODO audio_tx_send_mulaw
// RESAMPLE
resample(&resample_state, buffer);
// COMPRESS

View File

@@ -62,9 +62,15 @@
#include "capture_filter/logo.h"
#include "capture_filter/none.h"
#include "capture_filter/scale.h"
#ifdef HAVE_OPENCV
#include "capture_filter/resize.h"
#endif
static struct capture_filter_info *capture_filters[] = {
&capture_filter_blank,
#ifdef HAVE_OPENCV
&capture_filter_resize,
#endif
&capture_filter_every,
&capture_filter_logo,
&capture_filter_none,

View File

@@ -0,0 +1,190 @@
/*
* FILE: capture_filter/resize.cpp
* AUTHORS: Gerard Castillo <gerard.castillo@i2cat.net>
* Marc Palau <marc.palau@i2cat.net>
*
* Copyright (c) 2005-2010 Fundació i2CAT, Internet I Innovació Digital a Catalunya
*
* Redistribution and use in source and binary forms, with or without
* modification, is permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. All advertising materials mentioning features or use of this software
* must display the following acknowledgement:
*
* This product includes software developed by the Fundació i2CAT,
* Internet I Innovació Digital a Catalunya. This product also includes
* software developed by CESNET z.s.p.o.
*
* 4. Neither the name of the University nor of the Institute may be used
* to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#include "config_unix.h"
#include "config_win32.h"
#endif /* HAVE_CONFIG_H */
#include "capture_filter/resize_utils.h"
#ifdef __cplusplus
extern "C" {
#endif
#include "capture_filter.h"
#include "debug.h"
#include "video.h"
#include "video_codec.h"
#define MAX_TILES 16
struct module;
static int init(struct module *parent, const char *cfg, void **state);
static void done(void *state);
static struct video_frame *filter(void *state, struct video_frame *in);
struct state_resize {
int num;
int orig_width;
int orig_height;
int denom;
double scale_factor;
int reinit;
struct video_frame *frame;
};
/* Prints usage/help text for the resize capture filter. */
static void usage() {
        static const char *help_lines[] = {
                "\nDownscaling by scale factor:\n\n",
                "resize usage:\n",
                "\tresize:numerator/denominator\n\n",
                "Downscaling example: resize:1/2 - downscale input frame size by scale factor of 2\n",
        };
        for (unsigned idx = 0; idx < sizeof help_lines / sizeof help_lines[0]; ++idx) {
                printf("%s", help_lines[idx]);
        }
}
/**
 * Parses the "numerator/denominator" configuration string and allocates the
 * filter state.
 *
 * @param parent unused module handle
 * @param cfg    configuration ("num/denom", or "help"); NULL is an error
 * @param state  receives the newly allocated struct state_resize on success
 * @return 0 on success, 1 when help was printed, -1 on invalid configuration
 *         or allocation failure
 */
static int init(struct module *parent, const char *cfg, void **state)
{
        UNUSED(parent);
        int n;
        int denom = 1;

        if (cfg) {
                if (strcasecmp(cfg, "help") == 0) {
                        usage();
                        return 1;
                }
                n = atoi(cfg);
                if (strchr(cfg, '/')) {
                        denom = atoi(strchr(cfg, '/') + 1);
                }
        } else {
                usage();
                return -1;
        }

        if (n <= 0 || denom <= 0) {
                printf("\n[RESIZE ERROR] numerator and denominator resize factors must be greater than zero!\n");
                usage();
                return -1;
        }
        /* Divide in floating point: the original integer division (n/denom)
         * evaluated e.g. 3/2 as 1, letting non-integer upscaling ratios
         * slip past this "downscale only" check. */
        if ((double) n / denom > 1.0) {
                printf("\n[RESIZE ERROR] numerator and denominator resize factors must be lower than 1 (only downscaling is supported)\n");
                usage();
                return -1;
        }

        struct state_resize *s = (struct state_resize *) calloc(1, sizeof(struct state_resize));
        if (s == NULL) {
                return -1; /* out of memory */
        }
        s->reinit = 1;
        s->num = n;
        s->denom = denom;

        *state = s;
        return 0;
}
/* Releases the resize filter state: the owned output frame, then the state. */
static void done(void *state)
{
        struct state_resize *s = (struct state_resize *) state;

        vf_free(s->frame);
        free(s);
}
/**
 * Downscales every tile of the input frame by the configured num/denom factor
 * and converts the output to RGB.
 *
 * On the first call (s->reinit set) the output frame is allocated from the
 * input descriptor -- since only downscaling is supported, those buffers are
 * large enough for the resized RGB data -- and each tile's target dimensions
 * are computed.
 *
 * @return the state-owned resized frame, or NULL when resize_frame() fails
 */
static struct video_frame *filter(void *state, struct video_frame *in)
{
        struct state_resize *s = (struct state_resize *) state;
        unsigned int i;
        int res = 0;

        assert(in->tile_count <= MAX_TILES);

        if (s->reinit == 1) {
                struct video_desc desc = video_desc_from_frame(in);
                s->frame = vf_alloc_desc_data(desc);
        }

        for (i = 0; i < s->frame->tile_count; i++) {
                if (s->reinit == 1) {
                        //TODO: all tiles could have different sizes and other color specs different than UYVY and RGB
                        s->orig_width = s->frame->tiles[i].width;
                        s->orig_height = s->frame->tiles[i].height;
                        s->frame->tiles[i].width = s->frame->tiles[i].width * s->num / s->denom;
                        s->frame->tiles[i].height = s->frame->tiles[i].height * s->num / s->denom;
                        s->frame->color_spec = RGB;
                        s->scale_factor = (double)s->num/s->denom;
                        if(i==0) printf("[resize filter] resizing from %dx%d to %dx%d\n", in->tiles[i].width, in->tiles[i].height, s->frame->tiles[i].width, s->frame->tiles[i].height);
                }

                res = resize_frame(in->tiles[i].data, in->color_spec, s->frame->tiles[i].data, &s->frame->tiles[i].data_len, s->orig_width, s->orig_height, s->scale_factor);
                if(res!=0){
                        error_msg("\n[RESIZE ERROR] Unable to resize with scale factor configured [%d/%d] in tile number %d\n", s->num, s->denom, i);
                        error_msg("\t\t No scale factor applied at all. No frame returns...\n");
                        return NULL;
                }else{
                        s->frame->color_spec = RGB;
                        s->frame->codec = RGB;
                }
        }
        /* Clear the flag only after ALL tiles are reconfigured. The original
         * code reset it inside the loop right after the first tile, so with
         * tile_count > 1 the remaining tiles kept their original (unscaled)
         * dimensions. */
        s->reinit = 0;

        return s->frame;
}
/* Registration record for this filter: the "resize" name plus the
 * init/done/filter lifecycle callbacks defined above. */
struct capture_filter_info capture_filter_resize = {
"resize",
init,
done,
filter,
};
#ifdef __cplusplus
}
#endif

View File

@@ -0,0 +1,57 @@
/*
* FILE: capture_filter/resize.h
* AUTHORS: Gerard Castillo <gerard.castillo@i2cat.net>
* Marc Palau <marc.palau@i2cat.net>
*
* Copyright (c) 2005-2010 Fundació i2CAT, Internet I Innovació Digital a Catalunya
*
* Redistribution and use in source and binary forms, with or without
* modification, is permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. All advertising materials mentioning features or use of this software
* must display the following acknowledgement:
*
* This product includes software developed by the Fundació i2CAT,
* Internet I Innovació Digital a Catalunya. This product also includes
* software developed by CESNET z.s.p.o.
*
* 4. Neither the name of the University nor of the Institute may be used
* to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#ifndef CAPTURE_FILTER_RESIZE_H_
#define CAPTURE_FILTER_RESIZE_H_

#ifdef HAVE_CONFIG_H
#include "config.h"
#include "config_win32.h"
#include "config_unix.h"
#endif // HAVE_CONFIG_H

struct capture_filter_info;

/* Registration record for the OpenCV-based resize capture filter
 * (defined in capture_filter/resize.cpp). */
extern struct capture_filter_info capture_filter_resize;

#endif // CAPTURE_FILTER_RESIZE_H_

View File

@@ -0,0 +1,77 @@
/*
* FILE: capture_filter/resize_utils.cpp
* AUTHORS: Gerard Castillo <gerard.castillo@i2cat.net>
* Marc Palau <marc.palau@i2cat.net>
*
* Copyright (c) 2005-2010 Fundació i2CAT, Internet I Innovació Digital a Catalunya
*
* Redistribution and use in source and binary forms, with or without
* modification, is permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. All advertising materials mentioning features or use of this software
* must display the following acknowledgement:
*
* This product includes software developed by the Fundació i2CAT,
* Internet I Innovació Digital a Catalunya. This product also includes
* software developed by CESNET z.s.p.o.
*
* 4. Neither the name of the University nor of the Institute may be used
* to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#include "capture_filter/resize_utils.h"
using namespace cv;
int resize_frame(char *indata, codec_t in_color, char *outdata, unsigned int *data_len, unsigned int width, unsigned int height, double scale_factor){
assert(in_color == UYVY || in_color == RGB);
int res = 0;
Mat out, in, rgb;
if (indata == NULL || outdata == NULL || data_len == NULL) {
return 1;
}
switch(in_color){
case UYVY:
in.create(height, width, CV_8UC2);
in.data = (uchar*)indata;
cvtColor(in, rgb, CV_YUV2RGB_UYVY);
resize(rgb, out, Size(0,0), scale_factor, scale_factor, INTER_LINEAR);
break;
case RGB:
in.create(height, width, CV_8UC3);
in.data = (uchar*)indata;
resize(in, out, Size(0,0), scale_factor, scale_factor, INTER_LINEAR);
break;
}
*data_len = out.step * out.rows * sizeof(char);
memcpy(outdata,out.data,*data_len);
return res;
}

View File

@@ -0,0 +1,60 @@
/*
* FILE: capture_filter/resize_utils.c
* AUTHORS: Gerard Castillo <gerard.castillo@i2cat.net>
* Marc Palau <marc.palau@i2cat.net>
*
* Copyright (c) 2005-2010 Fundació i2CAT, Internet I Innovació Digital a Catalunya
*
* Redistribution and use in source and binary forms, with or without
* modification, is permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. All advertising materials mentioning features or use of this software
* must display the following acknowledgement:
*
* This product includes software developed by the Fundació i2CAT,
* Internet I Innovació Digital a Catalunya. This product also includes
* software developed by CESNET z.s.p.o.
*
* 4. Neither the name of the University nor of the Institute may be used
* to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
* EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#ifndef RESIZE_UTILS_H_
#define RESIZE_UTILS_H_
#ifdef HAVE_CONFIG_H
#include "config.h"
#include "config_unix.h"
#include "config_win32.h"
#endif
#include <opencv/cv.hpp>
#include <opencv/cv.h>
#include "types.h"
int resize_frame(char *indata, codec_t in_color, char *outdata, unsigned int *data_len, unsigned int width, unsigned int height, double scale_factor);
#endif// RESIZE_UTILS_H_

View File

@@ -96,14 +96,14 @@
#include <mcheck.h>
#endif
#define EXIT_FAIL_USAGE 1
#define EXIT_FAIL_UI 2
#define EXIT_FAIL_DISPLAY 3
#define EXIT_FAIL_CAPTURE 4
#define EXIT_FAIL_NETWORK 5
#define EXIT_FAIL_TRANSMIT 6
#define EXIT_FAIL_COMPRESS 7
#define EXIT_FAIL_DECODER 8
#define EXIT_FAIL_USAGE 1
#define EXIT_FAIL_UI 2
#define EXIT_FAIL_DISPLAY 3
#define EXIT_FAIL_CAPTURE 4
#define EXIT_FAIL_NETWORK 5
#define EXIT_FAIL_TRANSMIT 6
#define EXIT_FAIL_COMPRESS 7
#define EXIT_FAIL_DECODER 8
#define PORT_BASE 5004
#define PORT_AUDIO 5006
@@ -433,6 +433,7 @@ int main(int argc, char *argv[])
const char *requested_audio_fec = DEFAULT_AUDIO_FEC;
char *audio_channel_map = NULL;
const char *audio_scale = "mixauto";
rtsp_serv_t* rtsp_server = NULL;
bool isStd = FALSE;
int recv_port_number = PORT_BASE;
int send_port_number = PORT_BASE;
@@ -460,8 +461,8 @@ int main(int argc, char *argv[])
pthread_t receiver_thread_id,
capture_thread_id;
bool receiver_thread_started = false,
capture_thread_started = false;
bool receiver_thread_started = false,
capture_thread_started = false;
unsigned display_flags = 0;
int ret;
struct vidcap_params *audio_cap_dev;
@@ -551,11 +552,11 @@ int main(int argc, char *argv[])
return 0;
}
requested_display = optarg;
if(strchr(optarg, ':')) {
char *delim = strchr(optarg, ':');
*delim = '\0';
display_cfg = delim + 1;
}
if(strchr(optarg, ':')) {
char *delim = strchr(optarg, ':');
*delim = '\0';
display_cfg = delim + 1;
}
break;
case 't':
if (!strcmp(optarg, "help")) {
@@ -633,9 +634,9 @@ int main(int argc, char *argv[])
requested_video_fec = optarg;
}
break;
case 'h':
usage();
return 0;
case 'h':
usage();
return 0;
case 'P':
if(strchr(optarg, ':')) {
char *save_ptr = NULL;
@@ -825,17 +826,17 @@ int main(int argc, char *argv[])
}
#ifdef WIN32
WSADATA wsaData;
int err = WSAStartup(MAKEWORD(2, 2), &wsaData);
if(err != 0) {
fprintf(stderr, "WSAStartup failed with error %d.", err);
return EXIT_FAILURE;
}
if(LOBYTE(wsaData.wVersion) != 2 || HIBYTE(wsaData.wVersion) != 2) {
fprintf(stderr, "Counld not found usable version of Winsock.\n");
WSACleanup();
return EXIT_FAILURE;
}
WSADATA wsaData;
int err = WSAStartup(MAKEWORD(2, 2), &wsaData);
if(err != 0) {
fprintf(stderr, "WSAStartup failed with error %d.", err);
return EXIT_FAILURE;
}
if(LOBYTE(wsaData.wVersion) != 2 || HIBYTE(wsaData.wVersion) != 2) {
fprintf(stderr, "Counld not found usable version of Winsock.\n");
WSACleanup();
return EXIT_FAILURE;
}
#endif
if(control_init(control_port, &control, &root_mod) != 0) {
@@ -951,10 +952,11 @@ int main(int argc, char *argv[])
display_device, requested_mtu,
argc, argv);
}else if (video_protocol == H264_STD) {
uint8_t avType;
if(strcmp("none", vidcap_params_get_driver(vidcap_params_head)) != 0 && (strcmp("none",audio_send) != 0)) avType = 0; //AVStream
else if((strcmp("none",audio_send) != 0)) avType = 2; //AStream
else avType = 1; //VStream
rtps_types_t avType;
if(strcmp("none", vidcap_params_get_driver(vidcap_params_head)) != 0 && (strcmp("none",audio_send) != 0)) avType = avStdDyn; //AVStream
else if((strcmp("none",audio_send) != 0)) avType = audioPCMUdyn; //AStream
else if(strcmp("none", vidcap_params_get_driver(vidcap_params_head))) avType = videoH264; //VStream
else printf("[RTSP SERVER CHECK] no stream type... check capture devices input...\n");
uv->state_video_rxtx = new h264_rtp_video_rxtx(&root_mod, video_exporter,
requested_compression, requested_encryption,
@@ -971,7 +973,6 @@ int main(int argc, char *argv[])
} else { // SAGE
uv->state_video_rxtx = new sage_video_rxtx(&root_mod, video_exporter,
requested_compression, requested_receiver, sage_opts);
}
if(rxtx_mode & MODE_RECEIVER) {
@@ -1057,6 +1058,10 @@ cleanup:
vidcap_params_head = next;
}
#ifdef HAVE_RTSP_SERVER
if(rtsp_server) c_stop_server(rtsp_server);
#endif
module_done(&root_mod);
free(uv);
@@ -1065,7 +1070,7 @@ cleanup:
#endif
#ifdef WIN32
WSACleanup();
WSACleanup();
#endif
printf("Exit\n");

View File

@@ -569,4 +569,108 @@ int decode_audio_frame(struct coded_data *cdata, void *data)
return TRUE;
}
/*
 * Decodes a chain of received (mu-law standard RTP) audio packets into
 * audio->received_frame.
 *
 * Second version that uses external audio configuration,
 * now it uses a struct state_audio_decoder instead an audio_frame2.
 * It does multi-channel handling.
 *
 * Returns true on success (including "frame full, stop copying"), false when
 * cdata is NULL or multi-channel data is not a multiple of the channel count.
 */
int decode_audio_frame_mulaw(struct coded_data *cdata, void *data)
{
struct pbuf_audio_data *s = (struct pbuf_audio_data *) data;
struct state_audio_decoder *audio = s->decoder;
//struct state_audio_decoder *audio = (struct state_audio_decoder *)data;

if(!cdata) return false;

// Reconfiguration.
// Reallocate/retag received_frame whenever it disagrees with saved_desc
// (i.e. the externally supplied audio configuration changed).
if (audio->received_frame->bps != audio->saved_desc.bps ||
audio->received_frame->sample_rate != audio->saved_desc.sample_rate ||
audio->received_frame->ch_count != audio->saved_desc.ch_count ||
audio->received_frame->codec != audio->saved_desc.codec)
{
// NOTE(review): per-channel capacity is sample_rate * bps, i.e. one
// second of audio per channel -- confirm that matches the senders.
audio_frame2_allocate(audio->received_frame, audio->saved_desc.ch_count, audio->saved_desc.sample_rate * audio->saved_desc.bps);
audio->received_frame->bps = audio->saved_desc.bps;
audio->received_frame->sample_rate = audio->saved_desc.sample_rate;
audio->received_frame->ch_count = audio->saved_desc.ch_count;
audio->received_frame->codec = audio->saved_desc.codec;
}

// Initial setup
// Reset per-channel fill counters before copying this frame's packets.
for (int ch = 0 ; ch < audio->received_frame->ch_count ; ch ++) {
audio->received_frame->data_len[ch] = 0;
}

// Check-if-there-is-only-one-channel optimization.
if (audio->received_frame->ch_count == 1) {
// Single channel: packets can be copied contiguously as-is.
char *to = audio->received_frame->data[0];

while (cdata != NULL) {
// Get the data to copy into the received_frame.
char *from = cdata->data->data;

// See if the data fits.
if (cdata->data->data_len <= (int)(audio->received_frame->max_size - audio->received_frame->data_len[0])) {
// Copy the data
memcpy(to, from, cdata->data->data_len);
// Update the pointer and the counter.
to += cdata->data->data_len;
audio->received_frame->data_len[0] += cdata->data->data_len;
} else {
// Filled it out, exit now.
return true;
}

cdata = cdata->nxt;
}
} else { // Multi-channel case.

/*
* Unoptimized version of the multi-channel handling.
* TODO: Optimize it! It's a matrix transpose.
* Take a look at http://stackoverflow.com/questions/1777901/array-interleaving-problem
*/
char *to;
// Per-channel byte offset written so far (samples are de-interleaved,
// so each channel advances by bps per sample group).
int bytes_copied = 0;

while (cdata != NULL) {
// Check that the amount of data on cdata->data->data is congruent with 0 modulus audio->received_frame->ch_count.
if (cdata->data->data_len % audio->received_frame->ch_count != 0) {
// printf something?
return false;
}

// If there is space on the current audio_frame2 buffer.
// NOTE(review): max_size appears to be the per-channel capacity here,
// mirroring the single-channel branch -- confirm against audio_frame2.
if ((cdata->data->data_len / audio->received_frame->ch_count) <= (int)(audio->received_frame->max_size - bytes_copied)) {
char *from = cdata->data->data;

// For each group of samples.
for (int g = 0 ; g < (cdata->data->data_len / audio->received_frame->ch_count) ; g++) {
// Iterate throught each channel.
for (int ch = 0 ; ch < audio->received_frame->ch_count ; ch ++) {

// Copy the current sample from the RTP packet to the audio_frame2.
to = audio->received_frame->data[ch];
to += bytes_copied;

memcpy(to, from, audio->received_frame->bps);

from += audio->received_frame->bps;
audio->received_frame->data_len[ch] += audio->received_frame->bps;
}
bytes_copied += audio->received_frame->bps;
}
} else {
// Filled audio_frame2 out, exit now.
return true;
}

cdata = cdata->nxt;
}
}

return true;
}

View File

@@ -54,6 +54,7 @@
struct coded_data;
int decode_audio_frame(struct coded_data *cdata, void *data);
int decode_audio_frame_mulaw(struct coded_data *cdata, void *data);
void *audio_decoder_init(char *audio_channel_map, const char *audio_scale,
const char *encryption);
void audio_decoder_destroy(void *state);

View File

@@ -96,6 +96,7 @@ struct pbuf {
uint32_t last_display_ts;
};
static void free_cdata(struct coded_data *head);
static int frame_complete(struct pbuf_node *frame);
/*********************************************************************************/
@@ -173,31 +174,60 @@ struct pbuf *pbuf_init(void)
static void add_coded_unit(struct pbuf_node *node, rtp_packet * pkt)
{
/* Add "pkt" to the frame represented by "node". The "node" has */
/* previously been created, and has some coded data already... */
/* Add "pkt" to the frame represented by "node". The "node" has */
/* previously been created, and has some coded data already... */
/* New arrivals are added at the head of the list, which is stored */
/* in descending order of packets as they arrive (NOT necessarily */
/* descending sequence number order, as the network might reorder) */
/* New arrivals are added at the head of the list, which is stored */
/* in descending order of packets as they arrive (NOT necessarily */
/* descending sequence number order, as the network might reorder) */
struct coded_data *tmp;
struct coded_data *tmp, *curr, *prv;
assert(node->rtp_timestamp == pkt->ts);
assert(node->cdata != NULL);
assert(node->rtp_timestamp == pkt->ts);
assert(node->cdata != NULL);
tmp = malloc(sizeof(struct coded_data));
if (tmp != NULL) {
tmp->seqno = pkt->seq;
tmp->data = pkt;
tmp->prv = NULL;
tmp->nxt = node->cdata;
node->cdata->prv = tmp;
node->cdata = tmp;
node->mbit |= pkt->m;
tmp = malloc(sizeof(struct coded_data));
if (tmp == NULL) {
/* this is bad, out of memory, drop the packet... */
free(pkt);
return;
}
tmp->seqno = pkt->seq;
tmp->data = pkt;
node->mbit |= pkt->m;
if((int16_t)(tmp->seqno - node->cdata->seqno) > 0){
tmp->prv = NULL;
tmp->nxt = node->cdata;
node->cdata->prv = tmp;
node->cdata = tmp;
} else {
curr = node->cdata;
if (curr == NULL){
/* this is bad, out of memory, drop the packet... */
free(pkt);
free_cdata(tmp);
} else {
/* this is bad, out of memory, drop the packet... */
while (curr != NULL && ((int16_t)(tmp->seqno - curr->seqno) < 0)){
prv = curr;
curr = curr->nxt;
}
if (curr == NULL) {
tmp->nxt = NULL;
tmp->prv = prv;
prv->nxt = tmp;
}else if ((int16_t)(tmp->seqno - curr->seqno) > 0){
tmp->nxt = curr;
tmp->prv = curr->prv;
tmp->prv->nxt = tmp;
curr->prv = tmp;
} else {
/* this is bad, something went terribly wrong... */
free(pkt);
free_cdata(tmp);
}
}
}
}
static struct pbuf_node *create_new_pnode(rtp_packet * pkt, double playout_delay)
@@ -239,6 +269,8 @@ static struct pbuf_node *create_new_pnode(rtp_packet * pkt, double playout_delay
void pbuf_insert(struct pbuf *playout_buf, rtp_packet * pkt)
{
struct pbuf_node *tmp;
struct pbuf_node *curr;
pbuf_validate(playout_buf);
// collect statistics
@@ -283,33 +315,43 @@ void pbuf_insert(struct pbuf *playout_buf, rtp_packet * pkt)
return;
}
if (playout_buf->last->rtp_timestamp == pkt->ts) {
/* Packet belongs to last frame in playout_buf this is the */
/* most likely scenario - although... */
add_coded_unit(playout_buf->last, pkt);
if (playout_buf->last->rtp_timestamp < pkt->ts) {
/* Packet belongs to a new frame... */
tmp = create_new_pnode(pkt, playout_buf->playout_delay);
playout_buf->last->nxt = tmp;
tmp->prv = playout_buf->last;
playout_buf->last = tmp;
} else {
if (playout_buf->last->rtp_timestamp < pkt->ts) {
/* Packet belongs to a new frame... */
tmp = create_new_pnode(pkt, playout_buf->playout_delay);
playout_buf->last->nxt = tmp;
playout_buf->last->completed = true;
tmp->prv = playout_buf->last;
playout_buf->last = tmp;
} else {
/* Packet belongs to a previous frame... */
if (playout_buf->frst->rtp_timestamp > pkt->ts) {
debug_msg("A very old packet - discarded\n");
} else {
debug_msg
("A packet for a previous frame, but might still be useful\n");
/* Should probably insert this into the playout buffer here... */
}
if (pkt->m) {
debug_msg
("Oops... dropped packet with M bit set\n");
}
free(pkt);
/* Packet belongs to a previous frame... */
curr = playout_buf->last;
while(curr != playout_buf->frst && curr->rtp_timestamp > pkt->ts){
curr = curr->prv;
}
if (curr->rtp_timestamp == pkt->ts) {
/* Packet belongs to a previous existing frame... */
add_coded_unit(curr, pkt);
} else if (curr->rtp_timestamp < pkt->ts){
/* Packet belongs to a new previous frame */
tmp = create_new_pnode(pkt, playout_buf->playout_delay);
tmp->nxt = curr->nxt;
tmp->prv = curr;
curr->nxt->prv = tmp;
curr->nxt = tmp;
} else if (curr == playout_buf->frst) {
tmp = create_new_pnode(pkt, playout_buf->playout_delay);
tmp->nxt = playout_buf->frst;
curr->prv = tmp;
playout_buf->frst = tmp;
} else {
if (pkt->m) {
debug_msg
("Oops... dropped packet with M bit set\n");
}
free(pkt);
}
}
pbuf_validate(playout_buf);
}

View File

@@ -256,7 +256,7 @@ int rtp_send_data_hdr(struct rtp *session,
uint32_t rtp_ts, char pt, int m,
int cc, uint32_t csrc[],
char *phdr, int phdr_len,
char *data, int data_len,
char *data, int data_len,
char *extn, uint16_t extn_len, uint16_t extn_type);
void rtp_send_ctrl(struct rtp *session, uint32_t rtp_ts,
rtcp_app_callback appcallback, struct timeval curr_time);

View File

@@ -50,12 +50,16 @@
#include "rtp/rtp_callback.h"
#include "rtp/pbuf.h"
#include "rtp/rtpdec_h264.h"
#include "utils/h264_stream.h"
#include "utils/bs.h"
#include "video_frame.h"
static const uint8_t start_sequence[] = { 0, 0, 0, 1 };
int decode_frame_h264(struct coded_data *cdata, void *rx_data) {
int fill_coded_frame_from_sps(struct video_frame *rx_data, unsigned char *data, int data_len);
int decode_frame_h264(struct coded_data *cdata, void *decode_data) {
rtp_packet *pckt = NULL;
int substream = 0;
struct coded_data *orig = cdata;
uint8_t nal;
@@ -65,19 +69,21 @@ int decode_frame_h264(struct coded_data *cdata, void *rx_data) {
int pass;
int total_length = 0;
char *dst = NULL;
unsigned char *dst = NULL;
int src_len;
struct std_frame_received *buffers = (struct std_frame_received *) rx_data;
struct video_frame *frame = (struct video_frame *)decode_data;
frame->h264_frame_type = BFRAME;
for (pass = 0; pass < 2; pass++) {
if (pass > 0) {
cdata = orig;
buffers->buffer_len = total_length;
dst = buffers->frame_buffer + total_length;
buffers->bframe = TRUE;
buffers->iframe = FALSE;
if(frame->h264_frame_type == INTRA){
total_length+=frame->h264_offset_len;
}
frame->h264_buffer_len = total_length;
dst = frame->h264_buffer + total_length;
}
while (cdata != NULL) {
@@ -92,132 +98,133 @@ int decode_frame_h264(struct coded_data *cdata, void *rx_data) {
type = nal & 0x1f;
nri = nal & 0x60;
if (type == 7){
fill_coded_frame_from_sps(frame, (unsigned char*) pckt->data, pckt->data_len);
}
if (type >= 1 && type <= 23) {
if (buffers->bframe && !(type == 1 && nri == 0)){
buffers->bframe = FALSE;
}
if (!buffers->iframe && type == 5 ){
buffers->iframe =TRUE;
if(frame->h264_frame_type != INTRA && (type == 5 || type == 6)) {
frame->h264_frame_type = INTRA;
} else if (frame->h264_frame_type == BFRAME && nri != 0){
frame->h264_frame_type = OTHER;
}
type = 1;
}
const uint8_t *src = NULL;
switch (type) {
case 0:
case 1:
if (pass == 0) {
debug_msg("NAL type 1\n");
total_length += sizeof(start_sequence) + pckt->data_len;
} else {
dst -= pckt->data_len + sizeof(start_sequence);
memcpy(dst, start_sequence, sizeof(start_sequence));
memcpy(dst + sizeof(start_sequence), pckt->data, pckt->data_len);
unsigned char *dst2 = (unsigned char *)dst;
}
break;
case 24:
src = (const uint8_t *) pckt->data;
src_len = pckt->data_len;
src++;
src_len--;
while (src_len > 2) {
//TODO: Not properly tested
//TODO: bframes and iframes detection
uint16_t nal_size;
memcpy(&nal_size, src, sizeof(uint16_t));
src += 2;
src_len -= 2;
if (nal_size <= src_len) {
if (pass == 0) {
total_length += sizeof(start_sequence) + nal_size;
} else {
dst -= nal_size + sizeof(start_sequence);
memcpy(dst, start_sequence, sizeof(start_sequence));
memcpy(dst + sizeof(start_sequence), src, nal_size);
}
case 0:
case 1:
if (pass == 0) {
debug_msg("NAL type 1\n");
total_length += sizeof(start_sequence) + pckt->data_len;
} else {
error_msg("NAL size exceeds length: %u %d\n", nal_size, src_len);
return FALSE;
dst -= pckt->data_len + sizeof(start_sequence);
memcpy(dst, start_sequence, sizeof(start_sequence));
memcpy(dst + sizeof(start_sequence), pckt->data, pckt->data_len);
}
src += nal_size;
src_len -= nal_size;
break;
case 24:
src = (const uint8_t *) pckt->data;
src_len = pckt->data_len;
if (src_len < 0) {
error_msg("Consumed more bytes than we got! (%d)\n", src_len);
return FALSE;
src++;
src_len--;
while (src_len > 2) {
//TODO: Not properly tested
//TODO: bframes and iframes detection
uint16_t nal_size;
memcpy(&nal_size, src, sizeof(uint16_t));
src += 2;
src_len -= 2;
if (nal_size <= src_len) {
if (pass == 0) {
total_length += sizeof(start_sequence) + nal_size;
} else {
dst -= nal_size + sizeof(start_sequence);
memcpy(dst, start_sequence, sizeof(start_sequence));
memcpy(dst + sizeof(start_sequence), src, nal_size);
}
} else {
error_msg("NAL size exceeds length: %u %d\n", nal_size, src_len);
return FALSE;
}
src += nal_size;
src_len -= nal_size;
if (src_len < 0) {
error_msg("Consumed more bytes than we got! (%d)\n", src_len);
return FALSE;
}
}
}
break;
break;
case 25:
case 26:
case 27:
case 29:
error_msg("Unhandled NAL type\n");
return FALSE;
error_msg("Unhandled NAL type\n");
return FALSE;
case 28:
src = (const uint8_t *) pckt->data;
src_len = pckt->data_len;
src = (const uint8_t *) pckt->data;
src_len = pckt->data_len;
src++;
src_len--;
if (src_len > 1) {
uint8_t fu_header = *src;
uint8_t start_bit = fu_header >> 7;
//uint8_t end_bit = (fu_header & 0x40) >> 6;
uint8_t nal_type = fu_header & 0x1f;
uint8_t reconstructed_nal;
if (buffers->bframe && !(nal_type == 1 && nri == 0)){
buffers->bframe = FALSE;
}
if (!buffers->iframe && nal_type == 5){
buffers->iframe = TRUE;
}
// Reconstruct this packet's true nal; only the data follows.
/* The original nal forbidden bit and NRI are stored in this
* packet's nal. */
reconstructed_nal = nal & 0xe0;
reconstructed_nal |= nal_type;
// skip the fu_header
src++;
src_len--;
if (pass == 0) {
if (start_bit) {
total_length += sizeof(start_sequence) + sizeof(reconstructed_nal) + src_len;
if (src_len > 1) {
uint8_t fu_header = *src;
uint8_t start_bit = fu_header >> 7;
//uint8_t end_bit = (fu_header & 0x40) >> 6;
uint8_t nal_type = fu_header & 0x1f;
uint8_t reconstructed_nal;
if(frame->h264_frame_type != INTRA && (nal_type == 5 || nal_type == 6)){
frame->h264_frame_type = INTRA;
} else if (frame->h264_frame_type == BFRAME && nri != 0){
frame->h264_frame_type = OTHER;
}
// Reconstruct this packet's true nal; only the data follows.
/* The original nal forbidden bit and NRI are stored in this
* packet's nal. */
reconstructed_nal = nal & 0xe0;
reconstructed_nal |= nal_type;
// skip the fu_header
src++;
src_len--;
if (pass == 0) {
if (start_bit) {
total_length += sizeof(start_sequence) + sizeof(reconstructed_nal) + src_len;
} else {
total_length += src_len;
}
} else {
total_length += src_len;
if (start_bit) {
dst -= sizeof(start_sequence) + sizeof(reconstructed_nal) + src_len;
memcpy(dst, start_sequence, sizeof(start_sequence));
memcpy(dst + sizeof(start_sequence), &reconstructed_nal, sizeof(reconstructed_nal));
memcpy(dst + sizeof(start_sequence) + sizeof(reconstructed_nal), src, src_len);
} else {
dst -= src_len;
memcpy(dst, src, src_len);
}
}
} else {
if (start_bit) {
dst -= sizeof(start_sequence) + sizeof(reconstructed_nal) + src_len;
memcpy(dst, start_sequence, sizeof(start_sequence));
memcpy(dst + sizeof(start_sequence), &reconstructed_nal, sizeof(reconstructed_nal));
memcpy(dst + sizeof(start_sequence) + sizeof(reconstructed_nal), src, src_len);
} else {
dst -= src_len;
memcpy(dst, src, src_len);
}
error_msg("Too short data for FU-A H264 RTP packet\n");
return FALSE;
}
} else {
error_msg("Too short data for FU-A H264 RTP packet\n");
return FALSE;
}
break;
break;
default:
error_msg("Unknown NAL type\n");
return FALSE;
error_msg("Unknown NAL type\n");
return FALSE;
}
cdata = cdata->nxt;
}
@@ -225,3 +232,86 @@ int decode_frame_h264(struct coded_data *cdata, void *rx_data) {
return TRUE;
}
/**
 * Parse an H.264 sequence parameter set (SPS) NAL unit and update the
 * frame's geometry (h264_width/h264_height and tile 0 dimensions) when it
 * differs from the currently stored one.
 *
 * @param rx_data  frame whose dimensions are updated
 * @param data     SPS NAL unit, including the leading NAL header byte
 * @param data_len length of data in bytes
 * @return 0 on success, -1 on allocation or parse failure
 */
int fill_coded_frame_from_sps(struct video_frame *rx_data, unsigned char *data, int data_len){
    uint32_t width, height;
    sps_t* sps = (sps_t*)malloc(sizeof(sps_t));
    uint8_t* rbsp_buf = (uint8_t*)malloc(data_len);
    if (sps == NULL || rbsp_buf == NULL) {
        /* allocation failure was previously unchecked (free(NULL) is a no-op) */
        free(rbsp_buf);
        free(sps);
        return -1;
    }
    /* strip emulation-prevention bytes; the RBSP never exceeds the NAL length,
     * so data_len can serve as both input and output capacity */
    if (nal_to_rbsp(data, &data_len, rbsp_buf, &data_len) < 0){
        free(rbsp_buf);
        free(sps);
        return -1;
    }
    bs_t* b = bs_new(rbsp_buf, data_len);
    if (b == NULL || read_seq_parameter_set_rbsp(sps, b) < 0){
        bs_free(b);
        free(rbsp_buf);
        free(sps);
        return -1;
    }
    width = (sps->pic_width_in_mbs_minus1 + 1) * 16;
    height = (2 - sps->frame_mbs_only_flag) * (sps->pic_height_in_map_units_minus1 + 1) * 16;
    //NOTE: frame_mbs_only_flag = 1 --> only progressive frames
    //      frame_mbs_only_flag = 0 --> some type of interlacing (there are 3 types contemplated in the standard)
    if (sps->frame_cropping_flag){
        /* crop offsets are doubled: assumes 4:2:0 chroma crop units
         * (crop unit == 2 luma samples) -- TODO confirm for other
         * chroma_format_idc values */
        width -= (sps->frame_crop_left_offset*2 + sps->frame_crop_right_offset*2);
        height -= (sps->frame_crop_top_offset*2 + sps->frame_crop_bottom_offset*2);
    }
    if((width != rx_data->h264_width) || (height != rx_data->h264_height)) {
        rx_data->h264_width = width;
        rx_data->h264_height = height;
        vf_get_tile(rx_data, 0)->width = width;
        vf_get_tile(rx_data, 0)->height = height;
        /* NOTE(review): the tile data buffer is NOT reallocated here --
         * presumably the caller handles that; confirm */
    }
    bs_free(b);
    free(rbsp_buf);
    free(sps);
    return 0;
}
/**
 * Extract the coded picture width/height from an H.264 SPS NAL unit
 * (e.g. one taken from an SDP sprop-parameter-sets attribute).
 *
 * @param widthOut  set to the parsed width (only when the result is > 0)
 * @param heightOut set to the parsed height (only when the result is > 0)
 * @param data      SPS NAL unit, including the leading NAL header byte
 * @param data_len  length of data in bytes
 * @return 0 on success, -1 on allocation or parse failure
 */
int width_height_from_SDP(int *widthOut, int *heightOut , unsigned char *data, int data_len){
    uint32_t width, height;
    sps_t* sps = (sps_t*)malloc(sizeof(sps_t));
    uint8_t* rbsp_buf = (uint8_t*)malloc(data_len);
    if (sps == NULL || rbsp_buf == NULL) {
        /* allocation failure was previously unchecked (free(NULL) is a no-op) */
        free(rbsp_buf);
        free(sps);
        return -1;
    }
    /* strip emulation-prevention bytes; RBSP never exceeds the NAL length */
    if (nal_to_rbsp(data, &data_len, rbsp_buf, &data_len) < 0){
        free(rbsp_buf);
        free(sps);
        return -1;
    }
    bs_t* b = bs_new(rbsp_buf, data_len);
    if (b == NULL || read_seq_parameter_set_rbsp(sps, b) < 0){
        bs_free(b);
        free(rbsp_buf);
        free(sps);
        return -1;
    }
    width = (sps->pic_width_in_mbs_minus1 + 1) * 16;
    height = (2 - sps->frame_mbs_only_flag) * (sps->pic_height_in_map_units_minus1 + 1) * 16;
    //NOTE: frame_mbs_only_flag = 1 --> only progressive frames
    //      frame_mbs_only_flag = 0 --> some type of interlacing (there are 3 types contemplated in the standard)
    if (sps->frame_cropping_flag){
        /* crop offsets are doubled: assumes 4:2:0 chroma crop units --
         * TODO confirm for other chroma_format_idc values */
        width -= (sps->frame_crop_left_offset*2 + sps->frame_crop_right_offset*2);
        height -= (sps->frame_crop_top_offset*2 + sps->frame_crop_bottom_offset*2);
    }
    /* %u fixes the previous %d / uint32_t format mismatch */
    debug_msg("\n\n[width_height_from_SDP] width: %u height: %u\n\n",width,height);
    if(width > 0){
        *widthOut = width;
    }
    if(height > 0){
        *heightOut = height;
    }
    bs_free(b);
    free(rbsp_buf);
    free(sps);
    return 0;
}

View File

@@ -44,15 +44,7 @@
#ifndef _RTP_DEC_H264_H
#define _RTP_DEC_H264_H
struct std_frame_received {
uint32_t buffer_len; //[MAX_SUBSTREAMS];
//uint32_t buffer_num;//[MAX_SUBSTREAMS];
char *frame_buffer; //[MAX_SUBSTREAMS];
uint8_t bframe;
uint8_t iframe;
};
int
decode_frame_h264(struct coded_data *cdata, void *rx_data);
int decode_frame_h264(struct coded_data *cdata, void *decode_data);
int width_height_from_SDP(int *widthOut, int *heightOut , unsigned char *data, int data_len);
#endif

View File

@@ -48,7 +48,7 @@
BasicRTSPOnlyServer *BasicRTSPOnlyServer::srvInstance = NULL;
BasicRTSPOnlyServer::BasicRTSPOnlyServer(int port, struct module *mod, uint8_t avType){
BasicRTSPOnlyServer::BasicRTSPOnlyServer(int port, struct module *mod, rtps_types_t avType){
if(mod == NULL){
exit(1);
}
@@ -61,7 +61,7 @@ BasicRTSPOnlyServer::BasicRTSPOnlyServer(int port, struct module *mod, uint8_t a
}
BasicRTSPOnlyServer*
BasicRTSPOnlyServer::initInstance(int port, struct module *mod, uint8_t avType){
BasicRTSPOnlyServer::initInstance(int port, struct module *mod, rtps_types_t avType){
if (srvInstance != NULL){
return srvInstance;
}
@@ -78,10 +78,13 @@ BasicRTSPOnlyServer::getInstance(){
int BasicRTSPOnlyServer::init_server() {
if (env != NULL || rtspServer != NULL || mod == NULL || (avType > 2 && avType < 0)){
if (env != NULL || rtspServer != NULL || mod == NULL || (avType >= NUM_RTSP_FORMATS && avType < 0)){
exit(1);
}
//setting livenessTimeoutTask
unsigned reclamationTestSeconds = 35;
TaskScheduler* scheduler = BasicTaskScheduler::createNew();
env = BasicUsageEnvironment::createNew(*scheduler);
@@ -98,7 +101,7 @@ int BasicRTSPOnlyServer::init_server() {
fPort = 8554;
}
rtspServer = RTSPServer::createNew(*env, fPort, authDB);
rtspServer = RTSPServer::createNew(*env, fPort, authDB, reclamationTestSeconds);
if (rtspServer == NULL) {
*env << "Failed to create RTSP server: " << env->getResultMsg() << "\n";
exit(1);
@@ -108,11 +111,16 @@ int BasicRTSPOnlyServer::init_server() {
"UltraGrid RTSP server enabling standard transport",
"UltraGrid RTSP server");
if(avType == 0){
if(avType == avStdDyn){
sms->addSubsession(BasicRTSPOnlySubsession
::createNew(*env, True, mod, audioPCMUdyn));
sms->addSubsession(BasicRTSPOnlySubsession
::createNew(*env, True, mod, videoH264));
}else if(avType == avStd){
sms->addSubsession(BasicRTSPOnlySubsession
::createNew(*env, True, mod, 2));
::createNew(*env, True, mod, audioPCMUstd));
sms->addSubsession(BasicRTSPOnlySubsession
::createNew(*env, True, mod, 1));
::createNew(*env, True, mod, videoH264));
}else sms->addSubsession(BasicRTSPOnlySubsession
::createNew(*env, True, mod, avType));

View File

@@ -47,16 +47,17 @@
#include <RTSPServer.hh>
#include <BasicUsageEnvironment.hh>
#include "rtsp/rtsp_utils.h"
#include "module.h"
class BasicRTSPOnlyServer {
private:
BasicRTSPOnlyServer(int port, struct module *mod, uint8_t avType);
BasicRTSPOnlyServer(int port, struct module *mod, rtps_types_t avType);
public:
static BasicRTSPOnlyServer* initInstance(int port, struct module *mod, uint8_t avType);
static BasicRTSPOnlyServer* initInstance(int port, struct module *mod, rtps_types_t avType);
static BasicRTSPOnlyServer* getInstance();
int init_server();
@@ -70,7 +71,7 @@ private:
static BasicRTSPOnlyServer* srvInstance;
int fPort;
struct module *mod;
uint8_t avType;
rtps_types_t avType;
RTSPServer* rtspServer;
UsageEnvironment* env;
};

View File

@@ -52,14 +52,14 @@
BasicRTSPOnlySubsession*
BasicRTSPOnlySubsession::createNew(UsageEnvironment& env,
Boolean reuseFirstSource,
struct module *mod, uint8_t avType){
struct module *mod, rtps_types_t avType){
return new BasicRTSPOnlySubsession(env, reuseFirstSource, mod, avType);
}
BasicRTSPOnlySubsession
::BasicRTSPOnlySubsession(UsageEnvironment& env,
Boolean reuseFirstSource,
struct module *mod, uint8_t avType)
struct module *mod, rtps_types_t avType)
: ServerMediaSubsession(env),
fSDPLines(NULL),
fReuseFirstSource(reuseFirstSource), fLastStreamToken(NULL) {
@@ -89,7 +89,7 @@ void BasicRTSPOnlySubsession
::setSDPLines() {
//TODO: should be more dynamic
//VStream
if(avType == 1){
if(avType == videoH264 || avType == avStdDyn || avType == avStd){
unsigned estBitrate = 5000;
char const* mediaType = "video";
uint8_t rtpPayloadType = 96;
@@ -123,7 +123,7 @@ void BasicRTSPOnlySubsession
fSDPLines = sdpLines;
}
//AStream
if(avType == 2){
if(avType == audioPCMUdyn || avType == avStdDyn){
unsigned estBitrate = 384;
char const* mediaType = "audio";
uint8_t rtpPayloadType = 97;
@@ -172,7 +172,7 @@ void BasicRTSPOnlySubsession::getStreamParameters(unsigned clientSessionId,
Port& serverRTCPPort,
void*& streamToken) {
if(Vdestination == NULL && avType == 1){
if(Vdestination == NULL && (avType == videoH264 || avType == avStdDyn || avType == avStd)){
if (fSDPLines == NULL){
setSDPLines();
}
@@ -183,7 +183,7 @@ void BasicRTSPOnlySubsession::getStreamParameters(unsigned clientSessionId,
destinationAddr.s_addr = destinationAddress;
Vdestination = new Destinations(destinationAddr, clientRTPPort,clientRTCPPort);
}
if(Adestination == NULL && avType == 2){
if(Adestination == NULL && (avType == audioPCMUdyn || avType == avStdDyn)){
if (fSDPLines == NULL){
setSDPLines();
}
@@ -205,15 +205,12 @@ void BasicRTSPOnlySubsession::startStream(unsigned clientSessionId,
unsigned& rtpTimestamp,
ServerRequestAlternativeByteHandler* serverRequestAlternativeByteHandler,
void* serverRequestAlternativeByteHandlerClientData) {
struct response *resp = NULL;
if ((Vdestination == NULL && Adestination == NULL)){
return;
} else{
struct response *resp = NULL;
char pathV[1024];
char pathA[1024];
if (Vdestination != NULL){
if(avType == videoH264 || avType == avStdDyn || avType == avStd){
char pathV[1024];
if(avType == 1 && Vdestination != NULL){
memset(pathV, 0, sizeof(pathV));
enum module_class path_sender[] = { MODULE_CLASS_SENDER, MODULE_CLASS_NONE };
append_message_path(pathV, sizeof(pathV), path_sender);
@@ -237,9 +234,12 @@ void BasicRTSPOnlySubsession::startStream(unsigned clientSessionId,
resp = send_message(fmod, pathV, (struct message *) msgV2);
resp = NULL;
}
if(avType == 2 && Adestination != NULL){
struct msg_sender *msg = (struct msg_sender *)
new_message(sizeof(struct msg_sender));
}
if(Adestination != NULL){
if(avType == audioPCMUdyn || avType == avStdDyn){
char pathA[1024];
memset(pathA, 0, sizeof(pathA));
enum module_class path_sender[] = { MODULE_CLASS_AUDIO, MODULE_CLASS_SENDER, MODULE_CLASS_NONE };
append_message_path(pathA, sizeof(pathA), path_sender);
@@ -267,15 +267,10 @@ void BasicRTSPOnlySubsession::startStream(unsigned clientSessionId,
}
void BasicRTSPOnlySubsession::deleteStream(unsigned clientSessionId, void*& streamToken){
if ((Vdestination == NULL && Adestination == NULL)){
return;
} else {
char pathV[1024];
char pathA[1024];
Vdestination = NULL;
Adestination = NULL;
if(avType == 1 || Vdestination != NULL){
if (Vdestination != NULL){
if(avType == videoH264 || avType == avStdDyn || avType == avStd){
char pathV[1024];
Vdestination = NULL;
memset(pathV, 0, sizeof(pathV));
enum module_class path_sender[] = { MODULE_CLASS_SENDER, MODULE_CLASS_NONE };
append_message_path(pathV, sizeof(pathV), path_sender);
@@ -295,7 +290,12 @@ void BasicRTSPOnlySubsession::deleteStream(unsigned clientSessionId, void*& stre
msgV2->type = SENDER_MSG_CHANGE_RECEIVER;
send_message(fmod, pathV, (struct message *) msgV2);
}
if(avType == 2 || Adestination != NULL){
}
if(Adestination != NULL){
if(avType == audioPCMUdyn || avType == avStdDyn){
char pathA[1024];
Adestination = NULL;
memset(pathA, 0, sizeof(pathA));
enum module_class path_sender[] = { MODULE_CLASS_AUDIO, MODULE_CLASS_SENDER, MODULE_CLASS_NONE };
append_message_path(pathA, sizeof(pathA), path_sender);

View File

@@ -52,6 +52,7 @@
extern "C" {
#endif
#include "rtsp/rtsp_utils.h"
#include "module.h"
#include "control_socket.h"
@@ -91,12 +92,12 @@ public:
createNew(UsageEnvironment& env,
Boolean reuseFirstSource,
struct module *mod,
uint8_t avType);
rtps_types_t avType);
protected:
BasicRTSPOnlySubsession(UsageEnvironment& env, Boolean reuseFirstSource,
struct module *mod, uint8_t avType);
struct module *mod, rtps_types_t avType);
virtual ~BasicRTSPOnlySubsession();
@@ -139,7 +140,7 @@ private:
void* fLastStreamToken;
char fCNAME[100];
struct module *fmod;
uint8_t avType;
rtps_types_t avType;
};

View File

@@ -57,7 +57,7 @@ int c_start_server(rtsp_serv_t* server){
return ret;
}
rtsp_serv_t *init_rtsp_server(unsigned int port, struct module *mod, uint8_t avType){
rtsp_serv_t *init_rtsp_server(unsigned int port, struct module *mod, rtps_types_t avType){
rtsp_serv_t *server = (rtsp_serv_t*) malloc(sizeof(rtsp_serv_t));
server->port = port;
server->mod = mod;

View File

@@ -59,6 +59,7 @@ extern "C" {
#include "control_socket.h"
#include "module.h"
#include "debug.h"
#include "rtsp/rtsp_utils.h"
#ifdef __cplusplus
}
@@ -77,14 +78,14 @@ EXTERNC typedef struct rtsp_serv {
pthread_t server_th;
uint8_t watch;
uint8_t run;
uint8_t avType;
rtps_types_t avType;
} rtsp_serv_t;
EXTERNC int c_start_server(rtsp_serv_t* server);
EXTERNC void c_stop_server(rtsp_serv_t* server);
EXTERNC rtsp_serv_t* init_rtsp_server(unsigned int port, struct module *mod, uint8_t avType);
EXTERNC rtsp_serv_t* init_rtsp_server(unsigned int port, struct module *mod, rtps_types_t avType);
#undef EXTERNC

17
src/rtsp/rtsp_utils.h Normal file
View File

@@ -0,0 +1,17 @@
#ifndef _RTSP_TYPES_HH
#define _RTSP_TYPES_HH
typedef enum {
none,
avStd,
avStdDyn,
avUG,
videoH264,
videoUG,
audioPCMUstd,
audioPCMUdyn,
NUM_RTSP_FORMATS
}rtps_types_t;
#endif

View File

@@ -154,7 +154,7 @@ struct tx {
// Mulaw audio memory reservation
static void init_tx_mulaw_buffer() {
if (!buffer_mulaw_init) {
data_buffer_mulaw = malloc(BUFFER_MTU_SIZE);
data_buffer_mulaw = malloc(BUFFER_MTU_SIZE*20);
buffer_mulaw_init = 1;
}
}
@@ -379,7 +379,6 @@ void format_video_header(struct video_frame *frame, int tile_idx, int buffer_idx
void
tx_send_tile(struct tx *tx, struct video_frame *frame, int pos, struct rtp *rtp_session)
{
struct tile *tile;
int last = FALSE;
uint32_t ts = 0;
int fragment_offset = 0;
@@ -389,7 +388,6 @@ tx_send_tile(struct tx *tx, struct video_frame *frame, int pos, struct rtp *rtp_
platform_spin_lock(&tx->spin);
tile = vf_get_tile(frame, pos);
ts = get_local_mediatime();
if(frame->fragment &&
tx->last_frame_fragment_id == frame->frame_fragment_id) {
@@ -763,6 +761,8 @@ void audio_tx_send(struct tx* tx, struct rtp *rtp_session, audio_frame2 * buffer
*/
void audio_tx_send_mulaw(struct tx* tx, struct rtp *rtp_session, audio_frame2 * buffer)
{
assert(buffer->codec == AC_MULAW);
int pt;
uint32_t timestamp;
@@ -772,7 +772,6 @@ void audio_tx_send_mulaw(struct tx* tx, struct rtp *rtp_session, audio_frame2 *
// 8000 Hz, 1 channel is the ITU-T G.711 standard
// More channels or Hz goes to DynRTP-Type97
//TODO CHECK ACTUAL CHCOUNT IN ORDER TO PROPERLY CREATE PAYLOAD TYPE
if (buffer->ch_count == 1 && buffer->sample_rate == 8000) {
pt = PT_ITU_T_G711_PCMU;
} else {
@@ -782,50 +781,45 @@ void audio_tx_send_mulaw(struct tx* tx, struct rtp *rtp_session, audio_frame2 *
// The sizes for the different audio_frame2 channels must be the same.
for (int i = 1 ; i < buffer->ch_count ; i++) assert(buffer->data_len[0] == buffer->data_len[i]);
int data_len = buffer->data_len[0] * buffer->ch_count; /* Number of samples to send (bps=1)*/
int data_len = buffer->data_len[0] * buffer->ch_count; /* Number of samples to send */
int data_remainig = data_len;
int payload_size = tx->mtu - 40; /* Max size of an RTP payload field */
int packets = data_len / payload_size;
if (data_len % payload_size != 0) packets++; /* Number of RTP packets needed */
int payload_size = tx->mtu - 40; /* Max size of an RTP payload field */
init_tx_mulaw_buffer();
char *curr_sample = data_buffer_mulaw;
// For each interval that fits in an RTP payload field.
for (int p = 0 ; p < packets ; p++) {
int ch, pos = 0, count = 0, pointerToSend = 0;
int samples_per_packet;
int data_to_send;
if (data_remainig >= payload_size) {
samples_per_packet = payload_size / buffer->ch_count;
data_to_send = payload_size;
}
else {
samples_per_packet = data_remainig / buffer->ch_count;
data_to_send = data_remainig;
}
do{
for(ch = 0; ch < buffer->ch_count; ch++){
memcpy(curr_sample, buffer->data[ch] + pos, buffer->bps * sizeof(char));
curr_sample += buffer->bps * sizeof(char);
count+=buffer->bps * sizeof(char);
data_remainig--;
}
pos += buffer->bps * sizeof(char);
// Interleave the samples
for (int ch_sample = 0 ; ch_sample < samples_per_packet ; ch_sample++){
for (int ch = 0 ; ch < buffer->ch_count ; ch++) {
//TODO to be checked prepiously -> if(buffer->data[ch]!=NULL){
memcpy(curr_sample, (char *)(buffer->data[ch] + ch_sample), sizeof(uint8_t));
curr_sample += sizeof(uint8_t);
data_remainig--;
// }
}
}
if((pos * buffer->ch_count) % payload_size == 0){
// Update first sample timestamp
timestamp = get_std_audio_local_mediatime((buffer->data_len[0] - (data_remainig/(buffer->bps * buffer->ch_count))));
// Send the packet
rtp_send_data(rtp_session, timestamp, pt, 0, 0, /* contributing sources */
0, /* contributing sources length */
data_buffer_mulaw + pointerToSend, payload_size,
0, 0, 0);
pointerToSend += payload_size;
}
}while(count < data_len);
// Update first sample timestamp
timestamp = get_std_audio_local_mediatime((buffer->data_len[0] - (data_remainig/buffer->ch_count)));
// Send the packet
rtp_send_data(rtp_session, timestamp, pt, 0, 0, /* contributing sources */
0, /* contributing sources length */
data_buffer_mulaw, data_to_send,
0, 0, 0);
if((pos * buffer->ch_count) % payload_size != 0){
// Update first sample timestamp
timestamp = get_std_audio_local_mediatime((buffer->data_len[0] - (data_remainig/(buffer->bps * buffer->ch_count))));
// Send the packet
rtp_send_data(rtp_session, timestamp, pt, 0, 0, /* contributing sources */
0, /* contributing sources length */
data_buffer_mulaw + pointerToSend , (pos * buffer->ch_count) % payload_size,
0, 0, 0);
}
tx->buffer ++;
platform_spin_unlock(&tx->spin);
@@ -840,10 +834,10 @@ int rtpenc_h264_parse_nal_units(uint8_t *buf_in, int size,
static uint8_t *rtpenc_h264_find_startcode_internal(uint8_t *start,
uint8_t *end)
{
uint8_t *p = start;
uint8_t *pend = end; // - 3; // XXX: w/o -3, p[1] and p[2] may fail.
//uint8_t *p = start;
//uint8_t *pend = end; // - 3; // XXX: w/o -3, p[1] and p[2] may fail.
for (p = start; p < pend; p++) {
for (uint8_t *p = start; p < end; p++) {
if (p[0] == 0 && p[1] == 0 && p[2] == 1) {
return p;
}
@@ -888,13 +882,14 @@ int rtpenc_h264_parse_nal_units(uint8_t *buf_in, int size,
}
int nal_size = nal_end - nal_start;
size += nal_size;
if(nal_size > 4){
size += nal_size;
nals[(*nnals)].data = nal_start;
nals[(*nnals)].size = nal_size;
(*nnals)++;
nals[(*nnals)].data = nal_start;
nals[(*nnals)].size = nal_size;
(*nnals)++;
nal_start = nal_end;
nal_start = nal_end;
}else nal_start += 3;
}
return size;
}
@@ -937,6 +932,7 @@ static void tx_send_base_h264(struct tx *tx, struct tile *tile, struct rtp *rtp_
int fragmentation = 0;
int nal_max_size = tx->mtu - 40;
if (nal.size > nal_max_size) {
debug_msg("RTP packet size exceeds the MTU size\n");
fragmentation = 1;
@@ -1024,7 +1020,6 @@ static void tx_send_base_h264(struct tx *tx, struct tile *tile, struct rtp *rtp_
}
}
else {
uint8_t frag_header[2];
int frag_header_size = 2;

View File

@@ -111,6 +111,12 @@ struct video_desc {
unsigned int tile_count;
};
typedef enum h264_frame_type {
INTRA,
BFRAME,
OTHER
} h264_frame_type_t;
enum fec_type {
FEC_NONE = 0,
FEC_MULT = 1,
@@ -179,11 +185,21 @@ struct video_frame {
//standard transport
uint8_t isStd;
//h264_params
uint8_t h264_bframe;
uint8_t h264_iframe;
int h264_width;
int h264_height;
//H264 Standard transport
// Config
unsigned int h264_width;
unsigned int h264_height;
codec_t codec;
// Data
unsigned char *h264_buffer;
unsigned int h264_buffer_len;
unsigned int h264_offset_len;
unsigned char *h264_offset_buffer;
// Stats
unsigned int h264_media_time;
unsigned int h264_seqno;
// Control
h264_frame_type_t h264_frame_type;
struct fec_desc fec_params;
};

388
src/utils/bs.h Normal file
View File

@@ -0,0 +1,388 @@
/*
* h264bitstream - a library for reading and writing H.264 video
* Copyright (C) 2005-2007 Auroras Entertainment, LLC
* Copyright (C) 2008-2011 Avail-TVN
*
* Written by Alex Izvorski <aizvorski@gmail.com> and Alex Giladi <alex.giladi@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef _H264_BS_H
#define _H264_BS_H 1
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#ifdef __cplusplus
extern "C" {
#endif
/* Bitstream cursor: walks a byte buffer one bit at a time, MSB first. */
typedef struct
{
    uint8_t* start;   // first byte of the underlying buffer
    uint8_t* p;       // current byte; bits are consumed from it
    uint8_t* end;     // one past the last byte of the buffer
    int bits_left;    // unconsumed bits remaining in *p (counts 8 down to 1)
} bs_t;

#define _OPTIMIZE_BS_ 1

#if ( _OPTIMIZE_BS_ > 0 )
#ifndef FAST_U8
#define FAST_U8
#endif
#endif

/* construction / destruction */
static bs_t* bs_new(uint8_t* buf, size_t size);
static void bs_free(bs_t* b);
static bs_t* bs_clone( bs_t* dest, const bs_t* src );
static bs_t* bs_init(bs_t* b, uint8_t* buf, size_t size);

/* position / state queries */
static uint32_t bs_byte_aligned(bs_t* b);
static int bs_eof(bs_t* b);
static int bs_overrun(bs_t* b);
static int bs_pos(bs_t* b);

/* bit-level reads (ue/se are Exp-Golomb codes) */
static uint32_t bs_peek_u1(bs_t* b);
static uint32_t bs_read_u1(bs_t* b);
static uint32_t bs_read_u(bs_t* b, int n);
static uint32_t bs_read_f(bs_t* b, int n);
static uint32_t bs_read_u8(bs_t* b);
static uint32_t bs_read_ue(bs_t* b);
static int32_t bs_read_se(bs_t* b);

/* bit-level writes */
static void bs_write_u1(bs_t* b, uint32_t v);
static void bs_write_u(bs_t* b, int n, uint32_t v);
static void bs_write_f(bs_t* b, int n, uint32_t v);
static void bs_write_u8(bs_t* b, uint32_t v);
static void bs_write_ue(bs_t* b, uint32_t v);
static void bs_write_se(bs_t* b, int32_t v);

/* byte-level helpers (for byte-aligned streams) */
static int bs_read_bytes(bs_t* b, uint8_t* buf, int len);
static int bs_write_bytes(bs_t* b, uint8_t* buf, int len);
static int bs_skip_bytes(bs_t* b, int len);
static uint32_t bs_next_bits(bs_t* b, int nbits);
// IMPLEMENTATION
/* Attach a bitstream cursor to buf (size bytes) and rewind it; returns b. */
static inline bs_t* bs_init(bs_t* b, uint8_t* buf, size_t size)
{
    b->start     = buf;
    b->p         = buf;          /* cursor begins at the first byte */
    b->end       = buf + size;
    b->bits_left = 8;            /* a full, untouched byte under the cursor */
    return b;
}
/**
 * Allocate and initialize a bitstream cursor over buf/size.
 * @return the new bs_t, or NULL on allocation failure. (The malloc result
 *         was previously passed to bs_init unchecked, dereferencing NULL
 *         on OOM.)
 */
static inline bs_t* bs_new(uint8_t* buf, size_t size)
{
    bs_t* b = (bs_t*)malloc(sizeof(bs_t));
    if (b == NULL)
    {
        return NULL;
    }
    return bs_init(b, buf, size);
}
/* Release a stream created with bs_new(). Safe to call with NULL; the
 * underlying data buffer is NOT owned by the stream and is not freed. */
static inline void bs_free(bs_t* b)
{
    free(b);
}
/* Copy src's state into dest so dest can be read independently.
 * NOTE: dest->start is set to src's CURRENT position (src->p), not
 * src->start -- the clone's origin is wherever src is right now. */
static inline bs_t* bs_clone(bs_t* dest, const bs_t* src)
{
    dest->start = src->p;
    dest->p = src->p;
    dest->end = src->end;
    dest->bits_left = src->bits_left;
    return dest;
}
/* Nonzero when the cursor sits on a byte boundary. */
static inline uint32_t bs_byte_aligned(bs_t* b)
{
    return b->bits_left == 8;
}

/* 1 when the cursor has reached (or passed) the end of the buffer. */
static inline int bs_eof(bs_t* b)
{
    return (b->p >= b->end) ? 1 : 0;
}

/* 1 when a truncated access has pushed the cursor past the end. */
static inline int bs_overrun(bs_t* b)
{
    return (b->p > b->end) ? 1 : 0;
}

/* Byte offset of the cursor from the start, clamped to the buffer length. */
static inline int bs_pos(bs_t* b)
{
    return (b->p > b->end) ? (b->end - b->start) : (b->p - b->start);
}

/* Whole bytes between the cursor and the end of the buffer. */
static inline int bs_bytes_left(bs_t* b)
{
    return b->end - b->p;
}
/* Consume and return one bit (0 or 1); reads past EOF yield 0. */
static inline uint32_t bs_read_u1(bs_t* b)
{
    uint32_t bit = 0;
    b->bits_left--;
    if (!bs_eof(b))
    {
        bit = ((uint32_t)(*b->p) >> b->bits_left) & 0x01;
    }
    if (b->bits_left == 0)
    {
        /* byte exhausted: advance the cursor to a fresh byte */
        b->p++;
        b->bits_left = 8;
    }
    return bit;
}
/* Advance the cursor by one bit without reading the data. */
static inline void bs_skip_u1(bs_t* b)
{
    if (--b->bits_left == 0)
    {
        b->p++;
        b->bits_left = 8;
    }
}
/* Return the next bit without consuming it; 0 past EOF. */
static inline uint32_t bs_peek_u1(bs_t* b)
{
    if (bs_eof(b))
    {
        return 0;
    }
    return ((uint32_t)(*b->p) >> (b->bits_left - 1)) & 0x01;
}
/* Read n bits (MSB first) into an unsigned value. */
static inline uint32_t bs_read_u(bs_t* b, int n)
{
    uint32_t v = 0;
    for (int i = 0; i < n; i++)
    {
        /* shift-accumulate is equivalent to OR-ing into bit (n-i-1) */
        v = (v << 1) | bs_read_u1(b);
    }
    return v;
}
/* Advance the cursor by n bits without reading the data. */
static inline void bs_skip_u(bs_t* b, int n)
{
    while (n-- > 0)
    {
        bs_skip_u1(b);
    }
}
/* f(n) "fixed-pattern" bits read exactly like u(n). */
static inline uint32_t bs_read_f(bs_t* b, int n)
{
    return bs_read_u(b, n);
}
/* Read one byte; takes a whole-byte fast path when the cursor is aligned. */
static inline uint32_t bs_read_u8(bs_t* b)
{
#ifdef FAST_U8
    if (b->bits_left == 8 && !bs_eof(b))
    {
        uint32_t byte = b->p[0];
        b->p++;
        return byte;
    }
#endif
    return bs_read_u(b, 8);
}
/* Read an unsigned Exp-Golomb code ue(v): count leading zero bits, then
 * read that many suffix bits and add 2^leading - 1. */
static inline uint32_t bs_read_ue(bs_t* b)
{
    int leading = 0;
    while (bs_read_u1(b) == 0 && leading < 32 && !bs_eof(b))
    {
        leading++;
    }
    int32_t r = bs_read_u(b, leading);
    r += (1 << leading) - 1;
    return r;
}
/* Read a signed Exp-Golomb code se(v): codes 0,1,2,3,4,... map to
 * values 0,1,-1,2,-2,... */
static inline int32_t bs_read_se(bs_t* b)
{
    int32_t code = bs_read_ue(b);
    return (code & 0x01) ? (code + 1) / 2 : -(code / 2);
}
/* Write one bit (v's LSB); writes past EOF are silently dropped. */
static inline void bs_write_u1(bs_t* b, uint32_t v)
{
    b->bits_left--;
    if (!bs_eof(b))
    {
        /* clear the target bit first, then set it from v -- the buffer is
         * not pre-zeroed, so skipping the clear would corrupt output */
        *b->p = (*b->p & ~(0x01 << b->bits_left)) | ((v & 0x01) << b->bits_left);
    }
    if (b->bits_left == 0)
    {
        b->p++;
        b->bits_left = 8;
    }
}
/* Write the n low bits of v, MSB first. */
static inline void bs_write_u(bs_t* b, int n, uint32_t v)
{
    for (int bit = n - 1; bit >= 0; bit--)
    {
        bs_write_u1(b, (v >> bit) & 0x01);
    }
}
/* f(n) "fixed-pattern" bits write exactly like u(n). */
static inline void bs_write_f(bs_t* b, int n, uint32_t v)
{
    bs_write_u(b, n, v);
}
/* Write one byte; takes a whole-byte fast path when the cursor is aligned. */
static inline void bs_write_u8(bs_t* b, uint32_t v)
{
#ifdef FAST_U8
    if (b->bits_left == 8 && !bs_eof(b))
    {
        b->p[0] = v;
        b->p++;
        return;
    }
#endif
    bs_write_u(b, 8, v);
}
/* Write v as an unsigned Exp-Golomb code ue(v): (len-1) zero bits followed
 * by the (v+1) code word in len bits. len_table[x] is the bit length of x
 * for a single byte; the threshold chain extends it to 32-bit values. */
static inline void bs_write_ue(bs_t* b, uint32_t v)
{
    static const int len_table[256] =
    {
        1,
        1,
        2,2,
        3,3,3,3,
        4,4,4,4,4,4,4,4,
        5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
        6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
        6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
        7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
        7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
        7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
        7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
        8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,
        8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,
        8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,
        8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,
        8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,
        8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,
        8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,
        8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,
    };
    int len;
    if (v == 0)
    {
        /* code word for 0 is the single bit '1' */
        bs_write_u1(b, 1);
    }
    else
    {
        v++;  /* code word is v+1
               * NOTE(review): v == UINT32_MAX wraps to 0 here -- confirm
               * callers never pass values that large */
        if (v >= 0x01000000)
        {
            len = 24 + len_table[ v >> 24 ];
        }
        else if(v >= 0x00010000)
        {
            len = 16 + len_table[ v >> 16 ];
        }
        else if(v >= 0x00000100)
        {
            len = 8 + len_table[ v >> 8 ];
        }
        else
        {
            len = len_table[ v ];
        }
        /* emit (len-1) leading zeros plus the len-bit code word in one call */
        bs_write_u(b, 2*len-1, v);
    }
}
/* Write a signed Exp-Golomb code se(v): v > 0 maps to 2v-1, v <= 0 to -2v. */
static inline void bs_write_se(bs_t* b, int32_t v)
{
    bs_write_ue(b, (v <= 0) ? (uint32_t)(-v * 2) : (uint32_t)(v * 2 - 1));
}
/* Copy up to len bytes from the stream into buf (byte-aligned use only).
 * Returns the number of bytes actually copied, clamped at end of stream.
 * NOTE(review): the cursor advances by the REQUESTED len, not actual_len,
 * so a short read leaves bs_overrun() set -- presumably intentional
 * overrun signaling; confirm before changing. */
static inline int bs_read_bytes(bs_t* b, uint8_t* buf, int len)
{
    int actual_len = len;
    if (b->end - b->p < actual_len) { actual_len = b->end - b->p; }
    if (actual_len < 0) { actual_len = 0; }
    memcpy(buf, b->p, actual_len);
    if (len < 0) { len = 0; }
    b->p += len;
    return actual_len;
}
/* Copy up to len bytes from buf into the stream (byte-aligned use only).
 * Returns the number of bytes actually written, clamped at end of stream.
 * NOTE(review): the cursor advances by the REQUESTED len, not actual_len,
 * so a short write leaves bs_overrun() set -- presumably intentional;
 * confirm before changing. */
static inline int bs_write_bytes(bs_t* b, uint8_t* buf, int len)
{
    int actual_len = len;
    if (b->end - b->p < actual_len) { actual_len = b->end - b->p; }
    if (actual_len < 0) { actual_len = 0; }
    memcpy(b->p, buf, actual_len);
    if (len < 0) { len = 0; }
    b->p += len;
    return actual_len;
}
/* Skip len bytes (byte-aligned use only). Returns the number of bytes that
 * were actually available to skip, clamped at end of stream.
 * NOTE(review): like bs_read_bytes, the cursor advances by the requested
 * len so bs_overrun() flags a truncated skip. */
static inline int bs_skip_bytes(bs_t* b, int len)
{
    int actual_len = len;
    if (b->end - b->p < actual_len) { actual_len = b->end - b->p; }
    if (actual_len < 0) { actual_len = 0; }
    if (len < 0) { len = 0; }
    b->p += len;
    return actual_len;
}
/* Peek the next nbits without advancing the real stream. */
static inline uint32_t bs_next_bits(bs_t* bs, int nbits)
{
    bs_t peek;
    bs_clone(&peek, bs);   /* throwaway clone absorbs the cursor movement */
    return bs_read_u(&peek, nbits);
}
/* Peek the next nbytes (1..8) as a big-endian value without advancing;
 * returns 0 on a bad count or when the bytes are not all available. */
static inline uint64_t bs_next_bytes(bs_t* bs, int nbytes)
{
    if (nbytes < 1 || nbytes > 8 || bs->p + nbytes > bs->end)
    {
        return 0;
    }
    uint64_t val = 0;
    for (int i = 0; i < nbytes; i++)
    {
        val = (val << 8) | bs->p[i];
    }
    return val;
}
#define bs_print_state(b) fprintf( stderr, "%s:%d@%s: b->p=0x%02hhX, b->left = %d\n", __FILE__, __LINE__, __FUNCTION__, *b->p, b->bits_left )
#ifdef __cplusplus
}
#endif
#endif

426
src/utils/h264_stream.c Normal file
View File

@@ -0,0 +1,426 @@
/*
* h264bitstream - a library for reading and writing H.264 video
* Copyright (C) 2005-2007 Auroras Entertainment, LLC
* Copyright (C) 2008-2011 Avail-TVN
*
* Written by Alex Izvorski <aizvorski@gmail.com> and Alex Giladi <alex.giladi@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>
#include "h264_stream.h"
/***************************** reading ******************************/
/**
Convert NAL data (Annex B format) to RBSP data.
The size of rbsp_buf must be the same as size of the nal_buf to guarantee the output will fit.
If that is not true, output may be truncated and an error will be returned.
Additionally, certain byte sequences in the input nal_buf are not allowed in the spec and also cause the conversion to fail and an error to be returned.
@param[in] nal_buf the nal data
@param[in,out] nal_size as input, pointer to the size of the nal data; as output, filled in with the actual size of the nal data
@param[in,out] rbsp_buf allocated memory in which to put the rbsp data
@param[in,out] rbsp_size as input, pointer to the maximum size of the rbsp data; as output, filled in with the actual size of rbsp data
@return actual size of rbsp data, or -1 on error
*/
// 7.3.1 NAL unit syntax
// 7.4.1.1 Encapsulation of an SODB within an RBSP
int nal_to_rbsp(const uint8_t* nal_buf, int* nal_size, uint8_t* rbsp_buf, int* rbsp_size)
{
    int in;            /* read index into nal_buf; byte 0 (NAL header) is skipped */
    int out = 0;       /* write index into rbsp_buf */
    int zero_run = 0;  /* count of consecutive 0x00 bytes just seen */

    for (in = 1; in < *nal_size; in++)
    {
        /* 0x000000, 0x000001 and 0x000002 must never appear byte-aligned
         * inside a NAL unit */
        if (zero_run == 2 && nal_buf[in] < 0x03)
        {
            return -1;
        }
        if (zero_run == 2 && nal_buf[in] == 0x03)
        {
            /* emulation-prevention byte: the byte after 0x000003 must be
             * <= 0x03, unless the 0x03 terminates the NAL (cabac_zero_word) */
            if ((in < *nal_size - 1) && (nal_buf[in + 1] > 0x03))
            {
                return -1;
            }
            if (in == *nal_size - 1)
            {
                /* trailing cabac_zero_word 0x03 is simply discarded */
                break;
            }
            in++;          /* drop the 0x03 and emit the byte that follows */
            zero_run = 0;
        }
        if (out >= *rbsp_size)
        {
            /* output buffer too small */
            return -1;
        }
        rbsp_buf[out] = nal_buf[in];
        zero_run = (nal_buf[in] == 0x00) ? zero_run + 1 : 0;
        out++;
    }

    *nal_size = in;
    *rbsp_size = out;
    return out;
}
//7.3.2.1 Sequence parameter set RBSP syntax
/**
 Parse a Sequence Parameter Set RBSP into *sps (7.3.2.1 "Sequence
 parameter set RBSP syntax"). The structure is zeroed first, so every
 absent optional field reads as 0.

 @param[out]    sps destination structure, fully overwritten
 @param[in,out] b   bitstream reader positioned at the start of the SPS RBSP
 @return 0 (parse errors are not reported; callers must validate fields)
*/
int read_seq_parameter_set_rbsp(sps_t* sps, bs_t* b) {
    int i;

    memset(sps, 0, sizeof(sps_t));

    sps->profile_idc = bs_read_u8(b);
    sps->constraint_set0_flag = bs_read_u1(b);
    sps->constraint_set1_flag = bs_read_u1(b);
    sps->constraint_set2_flag = bs_read_u1(b);
    sps->constraint_set3_flag = bs_read_u1(b);
    sps->constraint_set4_flag = bs_read_u1(b);
    sps->constraint_set5_flag = bs_read_u1(b);
    sps->reserved_zero_2bits  = bs_read_u(b,2); /* all 0's */
    sps->level_idc = bs_read_u8(b);
    sps->seq_parameter_set_id = bs_read_ue(b);

    /* chroma_format_idc is inferred to be 1 (4:2:0) when not present. */
    sps->chroma_format_idc = 1;

    /* The extended High-profile fields below are present for these
       profile_idc values. The original check covered only 100/110/122/144
       (2005 edition of the spec); the current edition also lists 44, 83,
       86, 118, 128 and renames 144 to 244 — without them the remaining
       bits of such streams were misinterpreted. 144 is kept for old
       streams. */
    if( sps->profile_idc == 100 || sps->profile_idc == 110 ||
        sps->profile_idc == 122 || sps->profile_idc == 244 ||
        sps->profile_idc ==  44 || sps->profile_idc ==  83 ||
        sps->profile_idc ==  86 || sps->profile_idc == 118 ||
        sps->profile_idc == 128 || sps->profile_idc == 144 )
    {
        sps->chroma_format_idc = bs_read_ue(b);
        if( sps->chroma_format_idc == 3 )
        {
            /* named separate_colour_plane_flag in newer spec editions */
            sps->residual_colour_transform_flag = bs_read_u1(b);
        }
        sps->bit_depth_luma_minus8 = bs_read_ue(b);
        sps->bit_depth_chroma_minus8 = bs_read_ue(b);
        sps->qpprime_y_zero_transform_bypass_flag = bs_read_u1(b);
        sps->seq_scaling_matrix_present_flag = bs_read_u1(b);
        if( sps->seq_scaling_matrix_present_flag )
        {
            for( i = 0; i < 8; i++ )
            {
                sps->seq_scaling_list_present_flag[ i ] = bs_read_u1(b);
                if( sps->seq_scaling_list_present_flag[ i ] )
                {
                    /* NOTE(review): ScalingList4x4/8x8 are NULL after the
                       memset above and are never allocated here, so
                       read_scaling_list must tolerate a NULL destination
                       while still consuming the bits. */
                    if( i < 6 )
                    {
                        read_scaling_list( b, sps->ScalingList4x4[ i ], 16,
                                           sps->UseDefaultScalingMatrix4x4Flag[ i ]);
                    }
                    else
                    {
                        read_scaling_list( b, sps->ScalingList8x8[ i - 6 ], 64,
                                           sps->UseDefaultScalingMatrix8x8Flag[ i - 6 ] );
                    }
                }
            }
        }
    }
    sps->log2_max_frame_num_minus4 = bs_read_ue(b);
    sps->pic_order_cnt_type = bs_read_ue(b);
    if( sps->pic_order_cnt_type == 0 )
    {
        sps->log2_max_pic_order_cnt_lsb_minus4 = bs_read_ue(b);
    }
    else if( sps->pic_order_cnt_type == 1 )
    {
        sps->delta_pic_order_always_zero_flag = bs_read_u1(b);
        sps->offset_for_non_ref_pic = bs_read_se(b);
        sps->offset_for_top_to_bottom_field = bs_read_se(b);
        sps->num_ref_frames_in_pic_order_cnt_cycle = bs_read_ue(b);
        for( i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++ )
        {
            /* Always consume the bits, but guard the array write: the
               count comes unbounded from the bitstream while the array
               holds 256 entries (spec limit is 255). */
            int offset = bs_read_se(b);
            if( i < 256 )
            {
                sps->offset_for_ref_frame[ i ] = offset;
            }
        }
    }
    sps->num_ref_frames = bs_read_ue(b);
    sps->gaps_in_frame_num_value_allowed_flag = bs_read_u1(b);
    sps->pic_width_in_mbs_minus1 = bs_read_ue(b);
    sps->pic_height_in_map_units_minus1 = bs_read_ue(b);
    sps->frame_mbs_only_flag = bs_read_u1(b);
    if( !sps->frame_mbs_only_flag )
    {
        sps->mb_adaptive_frame_field_flag = bs_read_u1(b);
    }
    sps->direct_8x8_inference_flag = bs_read_u1(b);
    sps->frame_cropping_flag = bs_read_u1(b);
    if( sps->frame_cropping_flag )
    {
        sps->frame_crop_left_offset = bs_read_ue(b);
        sps->frame_crop_right_offset = bs_read_ue(b);
        sps->frame_crop_top_offset = bs_read_ue(b);
        sps->frame_crop_bottom_offset = bs_read_ue(b);
    }
    sps->vui_parameters_present_flag = bs_read_u1(b);
    if( sps->vui_parameters_present_flag )
    {
        read_vui_parameters(sps, b);
    }
    read_rbsp_trailing_bits(b);
    return 0;
}
//7.3.2.1.1 Scaling list syntax
/**
 Read one quantization scaling list (7.3.2.1.1 "Scaling list syntax").

 @param[in,out] b              bitstream reader
 @param[out] scalingList       destination array, or NULL to parse-and-discard
 @param[in] sizeOfScalingList  16 (4x4 lists) or 64 (8x8 lists)
 @param[in] useDefaultScalingMatrixFlag  NOTE(review): passed by value, so
            the assignment below never reaches the caller's
            UseDefaultScalingMatrix*Flag arrays (upstream h264bitstream
            takes an int* here); kept as-is because the prototype in
            h264_stream.h would have to change too.

 Previously a NULL scalingList returned immediately without consuming the
 delta_scale elements, which desynchronized the bit position for every
 subsequent field — and the SPS parser does pass NULL pointers here.
 The bits are now always consumed; only the store is skipped.
*/
void read_scaling_list(bs_t* b, int* scalingList, int sizeOfScalingList, int useDefaultScalingMatrixFlag )
{
    int j;
    int lastScale = 8;
    int nextScale = 8;
    for( j = 0; j < sizeOfScalingList; j++ )
    {
        if( nextScale != 0 )
        {
            int delta_scale = bs_read_se(b);
            nextScale = ( lastScale + delta_scale + 256 ) % 256;
            useDefaultScalingMatrixFlag = ( j == 0 && nextScale == 0 );
        }
        int scale = ( nextScale == 0 ) ? lastScale : nextScale;
        if( scalingList != NULL )
        {
            scalingList[ j ] = scale;
        }
        lastScale = scale;
    }
}
//Appendix E.1.1 VUI parameters syntax
/**
 Read VUI (Video Usability Information) parameters into sps->vui
 (Appendix E.1.1 "VUI parameters syntax").
 Every group of fields is gated by its *_present_flag, so the reads must
 stay in exactly this order to keep the bit position in sync.

 @param[out]    sps destination SPS whose vui (and hrd) members are filled
 @param[in,out] b   bitstream reader positioned at the VUI parameters
*/
void read_vui_parameters(sps_t* sps, bs_t* b)
{
    sps->vui.aspect_ratio_info_present_flag = bs_read_u1(b);
    if( sps->vui.aspect_ratio_info_present_flag )
    {
        sps->vui.aspect_ratio_idc = bs_read_u8(b);
        if( sps->vui.aspect_ratio_idc == SAR_Extended )
        {
            /* explicit sample aspect ratio follows */
            sps->vui.sar_width = bs_read_u(b,16);
            sps->vui.sar_height = bs_read_u(b,16);
        }
    }
    sps->vui.overscan_info_present_flag = bs_read_u1(b);
    if( sps->vui.overscan_info_present_flag )
    {
        sps->vui.overscan_appropriate_flag = bs_read_u1(b);
    }
    sps->vui.video_signal_type_present_flag = bs_read_u1(b);
    if( sps->vui.video_signal_type_present_flag )
    {
        sps->vui.video_format = bs_read_u(b,3);
        sps->vui.video_full_range_flag = bs_read_u1(b);
        sps->vui.colour_description_present_flag = bs_read_u1(b);
        if( sps->vui.colour_description_present_flag )
        {
            sps->vui.colour_primaries = bs_read_u8(b);
            sps->vui.transfer_characteristics = bs_read_u8(b);
            sps->vui.matrix_coefficients = bs_read_u8(b);
        }
    }
    sps->vui.chroma_loc_info_present_flag = bs_read_u1(b);
    if( sps->vui.chroma_loc_info_present_flag )
    {
        sps->vui.chroma_sample_loc_type_top_field = bs_read_ue(b);
        sps->vui.chroma_sample_loc_type_bottom_field = bs_read_ue(b);
    }
    sps->vui.timing_info_present_flag = bs_read_u1(b);
    if( sps->vui.timing_info_present_flag )
    {
        sps->vui.num_units_in_tick = bs_read_u(b,32);
        sps->vui.time_scale = bs_read_u(b,32);
        sps->vui.fixed_frame_rate_flag = bs_read_u1(b);
    }
    /* NOTE(review): both HRD blocks are parsed into the single sps->hrd,
       so when both flags are set the VCL HRD overwrites the NAL HRD —
       confirm this is acceptable for the callers. */
    sps->vui.nal_hrd_parameters_present_flag = bs_read_u1(b);
    if( sps->vui.nal_hrd_parameters_present_flag )
    {
        read_hrd_parameters(sps, b);
    }
    sps->vui.vcl_hrd_parameters_present_flag = bs_read_u1(b);
    if( sps->vui.vcl_hrd_parameters_present_flag )
    {
        read_hrd_parameters(sps, b);
    }
    if( sps->vui.nal_hrd_parameters_present_flag || sps->vui.vcl_hrd_parameters_present_flag )
    {
        sps->vui.low_delay_hrd_flag = bs_read_u1(b);
    }
    sps->vui.pic_struct_present_flag = bs_read_u1(b);
    sps->vui.bitstream_restriction_flag = bs_read_u1(b);
    if( sps->vui.bitstream_restriction_flag )
    {
        sps->vui.motion_vectors_over_pic_boundaries_flag = bs_read_u1(b);
        sps->vui.max_bytes_per_pic_denom = bs_read_ue(b);
        sps->vui.max_bits_per_mb_denom = bs_read_ue(b);
        sps->vui.log2_max_mv_length_horizontal = bs_read_ue(b);
        sps->vui.log2_max_mv_length_vertical = bs_read_ue(b);
        sps->vui.num_reorder_frames = bs_read_ue(b);
        sps->vui.max_dec_frame_buffering = bs_read_ue(b);
    }
}
//Appendix E.1.2 HRD parameters syntax
/**
 Read HRD (Hypothetical Reference Decoder) parameters into sps->hrd
 (Appendix E.1.2 "HRD parameters syntax").

 cpb_cnt_minus1 is an unbounded ue(v) value taken straight from the
 bitstream, while the hrd arrays hold 32 entries (the spec limits
 cpb_cnt_minus1 to 31). A malformed stream could previously overflow
 them; entries beyond index 31 are now still parsed — keeping the bit
 position in sync — but discarded.

 @param[out]    sps destination SPS whose hrd member is filled in
 @param[in,out] b   bitstream reader positioned at the HRD parameters
*/
void read_hrd_parameters(sps_t* sps, bs_t* b)
{
    int SchedSelIdx;

    sps->hrd.cpb_cnt_minus1 = bs_read_ue(b);
    sps->hrd.bit_rate_scale = bs_read_u(b,4);
    sps->hrd.cpb_size_scale = bs_read_u(b,4);
    for( SchedSelIdx = 0; SchedSelIdx <= sps->hrd.cpb_cnt_minus1; SchedSelIdx++ )
    {
        int bit_rate_value_minus1 = bs_read_ue(b);
        int cpb_size_value_minus1 = bs_read_ue(b);
        int cbr_flag = bs_read_u1(b);
        if( SchedSelIdx < 32 ) /* array bound; guards malformed streams */
        {
            sps->hrd.bit_rate_value_minus1[ SchedSelIdx ] = bit_rate_value_minus1;
            sps->hrd.cpb_size_value_minus1[ SchedSelIdx ] = cpb_size_value_minus1;
            sps->hrd.cbr_flag[ SchedSelIdx ] = cbr_flag;
        }
    }
    sps->hrd.initial_cpb_removal_delay_length_minus1 = bs_read_u(b,5);
    sps->hrd.cpb_removal_delay_length_minus1 = bs_read_u(b,5);
    sps->hrd.dpb_output_delay_length_minus1 = bs_read_u(b,5);
    sps->hrd.time_offset_length = bs_read_u(b,5);
}
//7.3.2.11 RBSP trailing bits syntax
/**
 Consume the RBSP trailing bits (7.3.2.11): one stop bit (shall be 1)
 followed by zero bits (shall be 0) up to the next byte boundary.
 The bit values are intentionally discarded — the (void) casts replace
 locals that were never read and triggered unused-variable warnings.

 @param[in,out] b bitstream reader positioned at the trailing bits
*/
void read_rbsp_trailing_bits(bs_t* b)
{
    (void) bs_read_u1( b ); /* rbsp_stop_one_bit, equal to 1 */
    while( !bs_byte_aligned(b) )
    {
        (void) bs_read_u1( b ); /* rbsp_alignment_zero_bit, equal to 0 */
    }
}
/***************************** debug ******************************/
/**
 Dump every parsed field of an SPS (plus its VUI and HRD sub-structures)
 to stdout, one "name : value" line each. Debugging aid only.

 The scaling-list arrays, seq_scaling_list_present_flag[] and
 offset_for_ref_frame[] are intentionally not printed.

 @param[in] sps the parsed SPS to print
*/
void debug_sps(sps_t* sps)
{
    printf("======= SPS =======\n");
    printf(" profile_idc : %d \n", sps->profile_idc );
    printf(" constraint_set0_flag : %d \n", sps->constraint_set0_flag );
    printf(" constraint_set1_flag : %d \n", sps->constraint_set1_flag );
    printf(" constraint_set2_flag : %d \n", sps->constraint_set2_flag );
    printf(" constraint_set3_flag : %d \n", sps->constraint_set3_flag );
    printf(" constraint_set4_flag : %d \n", sps->constraint_set4_flag );
    printf(" constraint_set5_flag : %d \n", sps->constraint_set5_flag );
    printf(" reserved_zero_2bits : %d \n", sps->reserved_zero_2bits );
    printf(" level_idc : %d \n", sps->level_idc );
    printf(" seq_parameter_set_id : %d \n", sps->seq_parameter_set_id );
    printf(" chroma_format_idc : %d \n", sps->chroma_format_idc );
    printf(" residual_colour_transform_flag : %d \n", sps->residual_colour_transform_flag );
    printf(" bit_depth_luma_minus8 : %d \n", sps->bit_depth_luma_minus8 );
    printf(" bit_depth_chroma_minus8 : %d \n", sps->bit_depth_chroma_minus8 );
    printf(" qpprime_y_zero_transform_bypass_flag : %d \n", sps->qpprime_y_zero_transform_bypass_flag );
    printf(" seq_scaling_matrix_present_flag : %d \n", sps->seq_scaling_matrix_present_flag );
    printf(" log2_max_frame_num_minus4 : %d \n", sps->log2_max_frame_num_minus4 );
    printf(" pic_order_cnt_type : %d \n", sps->pic_order_cnt_type );
    printf(" log2_max_pic_order_cnt_lsb_minus4 : %d \n", sps->log2_max_pic_order_cnt_lsb_minus4 );
    printf(" delta_pic_order_always_zero_flag : %d \n", sps->delta_pic_order_always_zero_flag );
    printf(" offset_for_non_ref_pic : %d \n", sps->offset_for_non_ref_pic );
    printf(" offset_for_top_to_bottom_field : %d \n", sps->offset_for_top_to_bottom_field );
    printf(" num_ref_frames_in_pic_order_cnt_cycle : %d \n", sps->num_ref_frames_in_pic_order_cnt_cycle );
    printf(" num_ref_frames : %d \n", sps->num_ref_frames );
    printf(" gaps_in_frame_num_value_allowed_flag : %d \n", sps->gaps_in_frame_num_value_allowed_flag );
    printf(" pic_width_in_mbs_minus1 : %d \n", sps->pic_width_in_mbs_minus1 );
    printf(" pic_height_in_map_units_minus1 : %d \n", sps->pic_height_in_map_units_minus1 );
    printf(" frame_mbs_only_flag : %d \n", sps->frame_mbs_only_flag );
    printf(" mb_adaptive_frame_field_flag : %d \n", sps->mb_adaptive_frame_field_flag );
    printf(" direct_8x8_inference_flag : %d \n", sps->direct_8x8_inference_flag );
    printf(" frame_cropping_flag : %d \n", sps->frame_cropping_flag );
    printf(" frame_crop_left_offset : %d \n", sps->frame_crop_left_offset );
    printf(" frame_crop_right_offset : %d \n", sps->frame_crop_right_offset );
    printf(" frame_crop_top_offset : %d \n", sps->frame_crop_top_offset );
    printf(" frame_crop_bottom_offset : %d \n", sps->frame_crop_bottom_offset );
    printf(" vui_parameters_present_flag : %d \n", sps->vui_parameters_present_flag );
    printf("=== VUI ===\n");
    printf(" aspect_ratio_info_present_flag : %d \n", sps->vui.aspect_ratio_info_present_flag );
    printf(" aspect_ratio_idc : %d \n", sps->vui.aspect_ratio_idc );
    printf(" sar_width : %d \n", sps->vui.sar_width );
    printf(" sar_height : %d \n", sps->vui.sar_height );
    printf(" overscan_info_present_flag : %d \n", sps->vui.overscan_info_present_flag );
    printf(" overscan_appropriate_flag : %d \n", sps->vui.overscan_appropriate_flag );
    printf(" video_signal_type_present_flag : %d \n", sps->vui.video_signal_type_present_flag );
    printf(" video_format : %d \n", sps->vui.video_format );
    printf(" video_full_range_flag : %d \n", sps->vui.video_full_range_flag );
    printf(" colour_description_present_flag : %d \n", sps->vui.colour_description_present_flag );
    printf(" colour_primaries : %d \n", sps->vui.colour_primaries );
    printf(" transfer_characteristics : %d \n", sps->vui.transfer_characteristics );
    printf(" matrix_coefficients : %d \n", sps->vui.matrix_coefficients );
    printf(" chroma_loc_info_present_flag : %d \n", sps->vui.chroma_loc_info_present_flag );
    printf(" chroma_sample_loc_type_top_field : %d \n", sps->vui.chroma_sample_loc_type_top_field );
    printf(" chroma_sample_loc_type_bottom_field : %d \n", sps->vui.chroma_sample_loc_type_bottom_field );
    printf(" timing_info_present_flag : %d \n", sps->vui.timing_info_present_flag );
    printf(" num_units_in_tick : %d \n", sps->vui.num_units_in_tick );
    printf(" time_scale : %d \n", sps->vui.time_scale );
    printf(" fixed_frame_rate_flag : %d \n", sps->vui.fixed_frame_rate_flag );
    printf(" nal_hrd_parameters_present_flag : %d \n", sps->vui.nal_hrd_parameters_present_flag );
    printf(" vcl_hrd_parameters_present_flag : %d \n", sps->vui.vcl_hrd_parameters_present_flag );
    printf(" low_delay_hrd_flag : %d \n", sps->vui.low_delay_hrd_flag );
    printf(" pic_struct_present_flag : %d \n", sps->vui.pic_struct_present_flag );
    printf(" bitstream_restriction_flag : %d \n", sps->vui.bitstream_restriction_flag );
    printf(" motion_vectors_over_pic_boundaries_flag : %d \n", sps->vui.motion_vectors_over_pic_boundaries_flag );
    printf(" max_bytes_per_pic_denom : %d \n", sps->vui.max_bytes_per_pic_denom );
    printf(" max_bits_per_mb_denom : %d \n", sps->vui.max_bits_per_mb_denom );
    printf(" log2_max_mv_length_horizontal : %d \n", sps->vui.log2_max_mv_length_horizontal );
    printf(" log2_max_mv_length_vertical : %d \n", sps->vui.log2_max_mv_length_vertical );
    printf(" num_reorder_frames : %d \n", sps->vui.num_reorder_frames );
    printf(" max_dec_frame_buffering : %d \n", sps->vui.max_dec_frame_buffering );
    printf("=== HRD ===\n");
    printf(" cpb_cnt_minus1 : %d \n", sps->hrd.cpb_cnt_minus1 );
    printf(" bit_rate_scale : %d \n", sps->hrd.bit_rate_scale );
    printf(" cpb_size_scale : %d \n", sps->hrd.cpb_size_scale );
    int SchedSelIdx;
    /* Clamp to the 32-entry arrays: cpb_cnt_minus1 comes straight from the
       bitstream and could otherwise drive this loop out of bounds. */
    for( SchedSelIdx = 0; SchedSelIdx <= sps->hrd.cpb_cnt_minus1 && SchedSelIdx < 32; SchedSelIdx++ )
    {
        printf(" bit_rate_value_minus1[%d] : %d \n", SchedSelIdx, sps->hrd.bit_rate_value_minus1[SchedSelIdx] ); // up to cpb_cnt_minus1, which is <= 31
        printf(" cpb_size_value_minus1[%d] : %d \n", SchedSelIdx, sps->hrd.cpb_size_value_minus1[SchedSelIdx] );
        printf(" cbr_flag[%d] : %d \n", SchedSelIdx, sps->hrd.cbr_flag[SchedSelIdx] );
    }
    printf(" initial_cpb_removal_delay_length_minus1 : %d \n", sps->hrd.initial_cpb_removal_delay_length_minus1 );
    printf(" cpb_removal_delay_length_minus1 : %d \n", sps->hrd.cpb_removal_delay_length_minus1 );
    printf(" dpb_output_delay_length_minus1 : %d \n", sps->hrd.dpb_output_delay_length_minus1 );
    printf(" time_offset_length : %d \n", sps->hrd.time_offset_length );
}

165
src/utils/h264_stream.h Normal file
View File

@@ -0,0 +1,165 @@
/*
* h264bitstream - a library for reading and writing H.264 video
* Copyright (C) 2005-2007 Auroras Entertainment, LLC
* Copyright (C) 2008-2011 Avail-TVN
*
* Written by Alex Izvorski <aizvorski@gmail.com> and Alex Giladi <alex.giladi@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef _H264_STREAM_H
#define _H264_STREAM_H 1
#include <stdint.h>
#include <stdio.h>
#include <assert.h>
#include "bs.h"
#ifdef __cplusplus
extern "C" {
#endif
/**
Sequence Parameter Set
@see 7.3.2.1 Sequence parameter set RBSP syntax
@see read_seq_parameter_set_rbsp
@see write_seq_parameter_set_rbsp
@see debug_sps
*/
typedef struct
{
    /* profile, level and constraint flags (7.4.2.1) */
    int profile_idc;
    int constraint_set0_flag;
    int constraint_set1_flag;
    int constraint_set2_flag;
    int constraint_set3_flag;
    int constraint_set4_flag;
    int constraint_set5_flag;
    int reserved_zero_2bits;    /* shall be 0 */
    int level_idc;
    int seq_parameter_set_id;
    /* chroma/bit-depth fields; only read for the High profiles,
       otherwise left at their inferred defaults */
    int chroma_format_idc;
    int residual_colour_transform_flag;
    int bit_depth_luma_minus8;
    int bit_depth_chroma_minus8;
    int qpprime_y_zero_transform_bypass_flag;
    /* scaling matrices */
    int seq_scaling_matrix_present_flag;
    int seq_scaling_list_present_flag[8];
    /* NOTE(review): these pointers are zeroed by the memset in
       read_seq_parameter_set_rbsp and never allocated there, so the
       parsed scaling-list values are discarded — confirm whether any
       caller allocates them first. */
    int* ScalingList4x4[6];
    int UseDefaultScalingMatrix4x4Flag[6];
    int* ScalingList8x8[2];
    int UseDefaultScalingMatrix8x8Flag[2];
    /* frame numbering and picture order count */
    int log2_max_frame_num_minus4;
    int pic_order_cnt_type;
    int log2_max_pic_order_cnt_lsb_minus4;
    int delta_pic_order_always_zero_flag;
    int offset_for_non_ref_pic;
    int offset_for_top_to_bottom_field;
    int num_ref_frames_in_pic_order_cnt_cycle;
    int offset_for_ref_frame[256];
    int num_ref_frames;
    int gaps_in_frame_num_value_allowed_flag;
    /* picture dimensions, in macroblock units */
    int pic_width_in_mbs_minus1;
    int pic_height_in_map_units_minus1;
    int frame_mbs_only_flag;
    int mb_adaptive_frame_field_flag;
    int direct_8x8_inference_flag;
    /* cropping rectangle */
    int frame_cropping_flag;
    int frame_crop_left_offset;
    int frame_crop_right_offset;
    int frame_crop_top_offset;
    int frame_crop_bottom_offset;
    int vui_parameters_present_flag;
    /* Video Usability Information (Annex E.1.1) */
    struct
    {
        int aspect_ratio_info_present_flag;
        int aspect_ratio_idc;
        int sar_width;
        int sar_height;
        int overscan_info_present_flag;
        int overscan_appropriate_flag;
        int video_signal_type_present_flag;
        int video_format;
        int video_full_range_flag;
        int colour_description_present_flag;
        int colour_primaries;
        int transfer_characteristics;
        int matrix_coefficients;
        int chroma_loc_info_present_flag;
        int chroma_sample_loc_type_top_field;
        int chroma_sample_loc_type_bottom_field;
        int timing_info_present_flag;
        int num_units_in_tick;
        int time_scale;
        int fixed_frame_rate_flag;
        int nal_hrd_parameters_present_flag;
        int vcl_hrd_parameters_present_flag;
        int low_delay_hrd_flag;
        int pic_struct_present_flag;
        int bitstream_restriction_flag;
        int motion_vectors_over_pic_boundaries_flag;
        int max_bytes_per_pic_denom;
        int max_bits_per_mb_denom;
        int log2_max_mv_length_horizontal;
        int log2_max_mv_length_vertical;
        int num_reorder_frames;
        int max_dec_frame_buffering;
    } vui;
    /* Hypothetical Reference Decoder parameters (Annex E.1.2);
       shared by the NAL and VCL HRD — the second parsed block
       overwrites the first */
    struct
    {
        int cpb_cnt_minus1;
        int bit_rate_scale;
        int cpb_size_scale;
        int bit_rate_value_minus1[32]; // up to cpb_cnt_minus1, which is <= 31
        int cpb_size_value_minus1[32];
        int cbr_flag[32];
        int initial_cpb_removal_delay_length_minus1;
        int cpb_removal_delay_length_minus1;
        int dpb_output_delay_length_minus1;
        int time_offset_length;
    } hrd;
} sps_t;
/**
H264 stream
Contains data structures for all NAL types that can be handled by this library.
When reading, data is read into those, and when writing it is written from those.
The reason why they are all contained in one place is that some of them depend on others, we need to
have all of them available to read or write correctly.
*/
int nal_to_rbsp(const uint8_t* nal_buf, int* nal_size, uint8_t* rbsp_buf, int* rbsp_size);
int read_seq_parameter_set_rbsp(sps_t* h, bs_t* b);
void read_scaling_list(bs_t* b, int* scalingList, int sizeOfScalingList, int useDefaultScalingMatrixFlag );
void read_vui_parameters(sps_t* h, bs_t* b);
void read_hrd_parameters(sps_t* h, bs_t* b);
void read_rbsp_trailing_bits(bs_t* b);
void debug_sps(sps_t* sps);
#define SAR_Extended 255
#ifdef __cplusplus
}
#endif
#endif

File diff suppressed because it is too large Load Diff

View File

@@ -53,7 +53,7 @@ h264_rtp_video_rxtx::h264_rtp_video_rxtx(struct module *parent, struct video_exp
const char *requested_compression, const char *requested_encryption,
const char *receiver, int rx_port, int tx_port,
bool use_ipv6, const char *mcast_if, const char *requested_video_fec, int mtu,
long packet_rate, uint8_t avType) :
long packet_rate, rtps_types_t avType) :
rtp_video_rxtx(parent, video_exporter, requested_compression, requested_encryption,
receiver, rx_port, tx_port,
use_ipv6, mcast_if, requested_video_fec, mtu, packet_rate)

View File

@@ -52,7 +52,7 @@ public:
const char *requested_compression, const char *requested_encryption,
const char *receiver, int rx_port, int tx_port,
bool use_ipv6, const char *mcast_if, const char *requested_video_fec, int mtu,
long packet_rate, uint8_t avType);
long packet_rate, rtps_types_t avType);
virtual ~h264_rtp_video_rxtx();
private:
virtual void send_frame(struct video_frame *);