This makes the bitstream.h header leaner.

Signed-off-by: Diego Biurrun <diego@biurrun.de>
@@ -33,7 +33,6 @@ | |||
#include "libavutil/log.h" | |||
#include "mathops.h" | |||
#include "vlc.h" | |||
typedef struct BitstreamContext { | |||
uint64_t bits; // stores bits read from the buffer | |||
@@ -287,78 +286,6 @@ static inline int bitstream_read_xbits(BitstreamContext *bc, unsigned length) | |||
return ((((uint32_t)(sign ^ cache)) >> (32 - length)) ^ sign) - sign; | |||
} | |||
/* Return the LUT element for the given bitstream configuration. */ | |||
static inline int set_idx(BitstreamContext *bc, int code, int *n, int *nb_bits, | |||
VLC_TYPE (*table)[2]) | |||
{ | |||
unsigned idx; | |||
*nb_bits = -*n; | |||
idx = bitstream_peek(bc, *nb_bits) + code; | |||
*n = table[idx][1]; | |||
return table[idx][0]; | |||
} | |||
/** | |||
* Parse a VLC code. | |||
* @param bits is the number of bits which will be read at once, must be | |||
* identical to nb_bits in init_vlc() | |||
* @param max_depth is the number of times bits bits must be read to completely | |||
* read the longest VLC code | |||
* = (max_vlc_length + bits - 1) / bits | |||
* If the VLC code is invalid and max_depth = 1, then no bits will be removed. | |||
* If the VLC code is invalid and max_depth > 1, then the number of bits removed | |||
* is undefined. */ | |||
static inline int bitstream_read_vlc(BitstreamContext *bc, VLC_TYPE (*table)[2], | |||
int bits, int max_depth) | |||
{ | |||
int nb_bits; | |||
unsigned idx = bitstream_peek(bc, bits); | |||
int code = table[idx][0]; | |||
int n = table[idx][1]; | |||
if (max_depth > 1 && n < 0) { | |||
skip_remaining(bc, bits); | |||
code = set_idx(bc, code, &n, &nb_bits, table); | |||
if (max_depth > 2 && n < 0) { | |||
skip_remaining(bc, nb_bits); | |||
code = set_idx(bc, code, &n, &nb_bits, table); | |||
} | |||
} | |||
skip_remaining(bc, n); | |||
return code; | |||
} | |||
/* Decode a run/level pair from a (possibly multi-level) RL-VLC table.
 * Mirrors bitstream_read_vlc(), but reads .level/.run/.len struct fields. */
#define BITSTREAM_RL_VLC(level, run, bc, table, bits, max_depth)        \
    do {                                                                \
        unsigned idx_ = bitstream_peek(bc, bits);                       \
        int len_      = table[idx_].len;                                \
        level         = table[idx_].level;                              \
                                                                        \
        if (max_depth > 1 && len_ < 0) {                                \
            /* negative len: escape into a sub-table */                 \
            int sub_;                                                   \
            bitstream_skip(bc, bits);                                   \
            sub_  = -len_;                                              \
            idx_  = bitstream_peek(bc, sub_) + level;                   \
            level = table[idx_].level;                                  \
            len_  = table[idx_].len;                                    \
            if (max_depth > 2 && len_ < 0) {                            \
                bitstream_skip(bc, sub_);                               \
                sub_  = -len_;                                          \
                idx_  = bitstream_peek(bc, sub_) + level;               \
                level = table[idx_].level;                              \
                len_  = table[idx_].len;                                \
            }                                                           \
        }                                                               \
        run = table[idx_].run;                                          \
        bitstream_skip(bc, len_);                                       \
    } while (0)
/* Return decoded truncated unary code for the values 0, 1, 2. */ | |||
static inline int bitstream_decode012(BitstreamContext *bc) | |||
{ | |||
@@ -21,6 +21,8 @@ | |||
#include <stdint.h> | |||
#include "bitstream.h" | |||
#define VLC_TYPE int16_t | |||
typedef struct VLC { | |||
@@ -62,4 +64,76 @@ void ff_free_vlc(VLC *vlc); | |||
init_vlc(vlc, bits, a, b, c, d, e, f, g, INIT_VLC_USE_NEW_STATIC); \ | |||
} while (0) | |||
/* Return the LUT element for the given bitstream configuration. */ | |||
static inline int set_idx(BitstreamContext *bc, int code, int *n, int *nb_bits, | |||
VLC_TYPE (*table)[2]) | |||
{ | |||
unsigned idx; | |||
*nb_bits = -*n; | |||
idx = bitstream_peek(bc, *nb_bits) + code; | |||
*n = table[idx][1]; | |||
return table[idx][0]; | |||
} | |||
/** | |||
* Parse a VLC code. | |||
* @param bits is the number of bits which will be read at once, must be | |||
* identical to nb_bits in init_vlc() | |||
* @param max_depth is the number of times bits bits must be read to completely | |||
* read the longest VLC code | |||
* = (max_vlc_length + bits - 1) / bits | |||
* If the VLC code is invalid and max_depth = 1, then no bits will be removed. | |||
* If the VLC code is invalid and max_depth > 1, then the number of bits removed | |||
* is undefined. */ | |||
static inline int bitstream_read_vlc(BitstreamContext *bc, VLC_TYPE (*table)[2], | |||
int bits, int max_depth) | |||
{ | |||
int nb_bits; | |||
unsigned idx = bitstream_peek(bc, bits); | |||
int code = table[idx][0]; | |||
int n = table[idx][1]; | |||
if (max_depth > 1 && n < 0) { | |||
skip_remaining(bc, bits); | |||
code = set_idx(bc, code, &n, &nb_bits, table); | |||
if (max_depth > 2 && n < 0) { | |||
skip_remaining(bc, nb_bits); | |||
code = set_idx(bc, code, &n, &nb_bits, table); | |||
} | |||
} | |||
skip_remaining(bc, n); | |||
return code; | |||
} | |||
/* Read one run/level pair from an RL-VLC table, descending into
 * sub-tables (marked by a negative .len) up to max_depth levels. */
#define BITSTREAM_RL_VLC(level, run, bc, table, bits, max_depth)        \
    do {                                                                \
        unsigned pos_ = bitstream_peek(bc, bits);                       \
        int len_      = table[pos_].len;                                \
        level         = table[pos_].level;                              \
                                                                        \
        if (max_depth > 1 && len_ < 0) {                                \
            int width_;                                                 \
            bitstream_skip(bc, bits);                                   \
            width_ = -len_;                                             \
            pos_   = bitstream_peek(bc, width_) + level;                \
            level  = table[pos_].level;                                 \
            len_   = table[pos_].len;                                   \
            if (max_depth > 2 && len_ < 0) {                            \
                bitstream_skip(bc, width_);                             \
                width_ = -len_;                                         \
                pos_   = bitstream_peek(bc, width_) + level;            \
                level  = table[pos_].level;                             \
                len_   = table[pos_].len;                               \
            }                                                           \
        }                                                               \
        run = table[pos_].run;                                          \
        bitstream_skip(bc, len_);                                       \
    } while (0)
#endif /* AVCODEC_VLC_H */ |