exit(1);
}
-void parse_matrix_coeffs(uint16_t *dest, const char *str)
+static void parse_matrix_coeffs(uint16_t *dest, const char *str)
{
int i;
const char *p = str;
}
}
-void opt_inter_matrix(const char *arg)
+static void opt_inter_matrix(const char *arg)
{
inter_matrix = av_mallocz(sizeof(uint16_t) * 64);
parse_matrix_coeffs(inter_matrix, arg);
}
-void opt_intra_matrix(const char *arg)
+static void opt_intra_matrix(const char *arg)
{
intra_matrix = av_mallocz(sizeof(uint16_t) * 64);
parse_matrix_coeffs(intra_matrix, arg);
}
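The change repeated throughout this patch is the same one: a helper that is only called from inside its own .c file gains internal linkage, so it stops exporting a symbol that could clash with another file's and the compiler is free to inline it. A minimal illustration of the effect, with hypothetical file and function names:

/* helper.c (hypothetical) */
static int clamp255(int v)                    /* internal linkage: invisible to other .c files */
{
    return v < 0 ? 0 : v > 255 ? 255 : v;
}

int scale_and_clamp(int v, int num, int den)  /* external linkage: the file's real interface */
{
    return clamp255(v * num / den);
}

Another file elsewhere in the tree could define its own clamp255 without causing a linker error, which is exactly the kind of collision the patch rules out.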
-uint16_t getbe16(const uint8_t *p)
+static uint16_t getbe16(const uint8_t *p)
{
return (p[0] << 8) | p[1];
}
-int get_nibble(const uint8_t *buf, int nibble_offset)
+static int get_nibble(const uint8_t *buf, int nibble_offset)
{
return (buf[nibble_offset >> 1] >> ((1 - (nibble_offset & 1)) << 2)) & 0xf;
}
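The two helpers above are small enough to exercise in isolation. A standalone sketch (the main() harness and byte values are mine, not part of the patch): getbe16 assembles a big-endian 16-bit value, and get_nibble addresses 4-bit units high-nibble-first.

#include <stdint.h>
#include <stdio.h>

static uint16_t getbe16(const uint8_t *p)
{
    return (p[0] << 8) | p[1];
}

static int get_nibble(const uint8_t *buf, int nibble_offset)
{
    return (buf[nibble_offset >> 1] >> ((1 - (nibble_offset & 1)) << 2)) & 0xf;
}

int main(void)
{
    const uint8_t buf[2] = { 0x12, 0x34 };

    printf("%04x\n", getbe16(buf));        /* 1234: first byte is the high byte */
    printf("%x %x %x %x\n",                /* 1 2 3 4: even offsets hit the high nibble */
           get_nibble(buf, 0), get_nibble(buf, 1),
           get_nibble(buf, 2), get_nibble(buf, 3));
    return 0;
}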
spatial_compose97i_dy(&cs, buffer, width, height, stride);
}
-void ff_spatial_idwt_buffered_init(dwt_compose_t *cs, slice_buffer * sb, int width, int height, int stride_line, int type, int decomposition_count){
+static void ff_spatial_idwt_buffered_init(dwt_compose_t *cs, slice_buffer * sb, int width, int height, int stride_line, int type, int decomposition_count){
int level;
for(level=decomposition_count-1; level>=0; level--){
switch(type){
}
}
-void ff_spatial_idwt_init(dwt_compose_t *cs, DWTELEM *buffer, int width, int height, int stride, int type, int decomposition_count){
+static void ff_spatial_idwt_init(dwt_compose_t *cs, DWTELEM *buffer, int width, int height, int stride, int type, int decomposition_count){
int level;
for(level=decomposition_count-1; level>=0; level--){
switch(type){
}
}
-void ff_spatial_idwt_slice(dwt_compose_t *cs, DWTELEM *buffer, int width, int height, int stride, int type, int decomposition_count, int y){
+static void ff_spatial_idwt_slice(dwt_compose_t *cs, DWTELEM *buffer, int width, int height, int stride, int type, int decomposition_count, int y){
const int support = type==1 ? 3 : 5;
int level;
if(type==2) return;
}
}
-void ff_spatial_idwt_buffered_slice(dwt_compose_t *cs, slice_buffer * slice_buf, int width, int height, int stride_line, int type, int decomposition_count, int y){
+static void ff_spatial_idwt_buffered_slice(dwt_compose_t *cs, slice_buffer * slice_buf, int width, int height, int stride_line, int type, int decomposition_count, int y){
const int support = type==1 ? 3 : 5;
int level;
if(type==2) return;
}
}
-void ff_spatial_idwt(DWTELEM *buffer, int width, int height, int stride, int type, int decomposition_count){
+static void ff_spatial_idwt(DWTELEM *buffer, int width, int height, int stride, int type, int decomposition_count){
if(type==2){
int level;
for(level=decomposition_count-1; level>=0; level--)
return 0;
}
-static void init_qexp(){
+static void init_qexp(void){
int i;
double v=128;
* Call av_free_static automatically before it's too late
*/
-static void do_free() __attribute__ ((destructor));
+static void do_free(void) __attribute__ ((destructor));
-static void do_free()
+static void do_free(void)
{
av_free_static();
}
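The destructor attribute used here is a GCC/Clang extension that runs a function automatically when the program (or shared library) is unloaded, which is what lets av_free_static be called without any explicit shutdown hook. A self-contained sketch of the same idiom (cleanup and table are made-up names, not the patch's code):

#include <stdio.h>
#include <stdlib.h>

static char *table;

/* Runs automatically at process exit (GCC/Clang extension). */
static void cleanup(void) __attribute__ ((destructor));
static void cleanup(void)
{
    free(table);
    fprintf(stderr, "static buffers released\n");
}

int main(void)
{
    table = malloc(1024);   /* stands in for lazily allocated static data */
    return 0;               /* cleanup() fires after main() returns */
}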
* @todo TODO: Take into account stride
 * @todo TODO: Allow use of external buffers?
*/
-int alloc_bitplane(BitPlane *bp, int width, int height)
+static int alloc_bitplane(BitPlane *bp, int width, int height)
{
if (!bp || bp->width<0 || bp->height<0) return -1;
bp->data = (uint8_t*)av_malloc(width*height);
/** Free the bitplane's buffer
 * @param bp Bitplane whose buffer is to be freed
*/
-void free_bitplane(BitPlane *bp)
+static void free_bitplane(BitPlane *bp)
{
bp->width = bp->stride = bp->height = 0;
if (bp->data) av_freep(&bp->data);
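For reference, a self-contained sketch of the same allocate/free pair with the stride tracked explicitly (Plane and the function names are hypothetical stand-ins for the decoder's BitPlane helpers; the real code uses av_malloc/av_freep):

#include <stdint.h>
#include <stdlib.h>

typedef struct {            /* stand-in for the decoder's BitPlane */
    uint8_t *data;
    int width, height, stride;
} Plane;

/* One byte per pel; stride equals width for now (see the TODO above). */
static int plane_alloc(Plane *p, int width, int height)
{
    if (!p || width <= 0 || height <= 0)
        return -1;
    p->data = malloc((size_t)width * height);
    if (!p->data)
        return -1;
    p->width  = width;
    p->height = height;
    p->stride = width;
    return 0;
}

static void plane_free(Plane *p)
{
    free(p->data);
    p->data = NULL;
    p->width = p->height = p->stride = 0;
}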
* @todo TODO: Process the blocks
* @todo TODO: Use M$ MPEG-4 cbp prediction
*/
-int vc9_decode_block(VC9Context *v, DCTELEM block[64], int n, int coded, int mquant)
+static int vc9_decode_block(VC9Context *v, DCTELEM block[64], int n, int coded, int mquant)
{
GetBitContext *gb = &v->s.gb;
MpegEncContext *s = &v->s;
/** Decode one I-frame MB (in Simple/Main profile)
* @todo TODO: Extend to AP
*/
-int vc9_decode_i_mb(VC9Context *v, DCTELEM block[6][64])
+static int vc9_decode_i_mb(VC9Context *v, DCTELEM block[6][64])
{
int i, cbp, val;
uint8_t *coded_val;
* @todo TODO: Extend to AP
* @fixme FIXME: DC value for inter blocks not set
*/
-int vc9_decode_p_mb(VC9Context *v, DCTELEM block[6][64])
+static int vc9_decode_p_mb(VC9Context *v, DCTELEM block[6][64])
{
MpegEncContext *s = &v->s;
GetBitContext *gb = &s->gb;
* @warning XXX: Used for decoding BI MBs
* @fixme FIXME: DC value for inter blocks not set
*/
-int vc9_decode_b_mb(VC9Context *v, DCTELEM block[6][64])
+static int vc9_decode_b_mb(VC9Context *v, DCTELEM block[6][64])
{
MpegEncContext *s = &v->s;
GetBitContext *gb = &v->s.gb;
/**
* reads 0-32 bits when using the ALT_BITSTREAM_READER_LE bitstream reader
*/
-unsigned int get_bits_long_le(GetBitContext *s, int n){
+static unsigned int get_bits_long_le(GetBitContext *s, int n){
if(n<=17) return get_bits(s, n);
else{
int ret= get_bits(s, 16);
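The hunk is cut off here, but the shape is the standard one for a little-endian long read: pull the low 16 bits first, then the remaining n-16 bits, and presumably splice the second read into the high half. A self-contained sketch of that splitting, with a toy bit reader standing in for GetBitContext (toy_reader and its functions are mine, not FFmpeg API):

#include <stdint.h>
#include <stdio.h>

/* Toy stand-in for a short (<=17 bit) reader: pulls n bits LSB-first from a 64-bit word. */
typedef struct { uint64_t data; int pos; } toy_reader;

static unsigned int toy_get_bits(toy_reader *r, int n)
{
    unsigned int v = (r->data >> r->pos) & ((1u << n) - 1);
    r->pos += n;
    return v;
}

/* Same splitting idea as get_bits_long_le: low 16 bits first, then the rest shifted up. */
static unsigned int toy_get_bits_long_le(toy_reader *r, int n)
{
    if (n <= 17)
        return toy_get_bits(r, n);
    else {
        unsigned int ret = toy_get_bits(r, 16);
        return ret | (toy_get_bits(r, n - 16) << 16);
    }
}

int main(void)
{
    toy_reader r = { 0xDEADBEEF, 0 };
    printf("0x%08X\n", toy_get_bits_long_le(&r, 32));   /* prints 0xDEADBEEF */
    return 0;
}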
/*
* add an audio output stream
*/
-AVStream *add_audio_stream(AVFormatContext *oc, int codec_id)
+static AVStream *add_audio_stream(AVFormatContext *oc, int codec_id)
{
AVCodecContext *c;
AVStream *st;
return st;
}
-void open_audio(AVFormatContext *oc, AVStream *st)
+static void open_audio(AVFormatContext *oc, AVStream *st)
{
AVCodecContext *c;
AVCodec *codec;
/* prepare a 16 bit dummy audio frame of 'frame_size' samples and
'nb_channels' channels */
-void get_audio_frame(int16_t *samples, int frame_size, int nb_channels)
+static void get_audio_frame(int16_t *samples, int frame_size, int nb_channels)
{
int j, i, v;
int16_t *q;
}
}
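The generator itself is cut off above; in output_example.c the idea is simply to synthesize a test tone: one sample value per frame position, duplicated across the interleaved channels. A sketch under that assumption (fill_sine_frame, t and tincr are illustrative names, not necessarily the example's own):

#include <math.h>
#include <stdint.h>

/* Hypothetical phase state; set tincr to 2*M_PI*freq/sample_rate before use. */
static double t, tincr;

/* Fill an interleaved 16-bit buffer with the same sine tone on every channel. */
static void fill_sine_frame(int16_t *samples, int frame_size, int nb_channels)
{
    int j, i;
    int16_t *q = samples;

    for (j = 0; j < frame_size; j++) {
        int v = (int)(sin(t) * 10000);   /* well below INT16_MAX, so no clipping */
        for (i = 0; i < nb_channels; i++)
            *q++ = v;
        t += tincr;                      /* advance the phase one sample */
    }
}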
-void write_audio_frame(AVFormatContext *oc, AVStream *st)
+static void write_audio_frame(AVFormatContext *oc, AVStream *st)
{
AVCodecContext *c;
AVPacket pkt;
}
}
-void close_audio(AVFormatContext *oc, AVStream *st)
+static void close_audio(AVFormatContext *oc, AVStream *st)
{
avcodec_close(st->codec);
int frame_count, video_outbuf_size;
/* add a video output stream */
-AVStream *add_video_stream(AVFormatContext *oc, int codec_id)
+static AVStream *add_video_stream(AVFormatContext *oc, int codec_id)
{
AVCodecContext *c;
AVStream *st;
return st;
}
-AVFrame *alloc_picture(int pix_fmt, int width, int height)
+static AVFrame *alloc_picture(int pix_fmt, int width, int height)
{
AVFrame *picture;
uint8_t *picture_buf;
return picture;
}
-void open_video(AVFormatContext *oc, AVStream *st)
+static void open_video(AVFormatContext *oc, AVStream *st)
{
AVCodec *codec;
AVCodecContext *c;
}
/* prepare a dummy image */
-void fill_yuv_image(AVFrame *pict, int frame_index, int width, int height)
+static void fill_yuv_image(AVFrame *pict, int frame_index, int width, int height)
{
int x, y, i;
}
}
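Again the body is truncated; a dummy image is typically just a gradient that moves with the frame index, so the encoder has some motion to work on. An illustrative, self-contained version over raw planes (fill_yuv_pattern and its exact pattern are mine, though the stock example does something very similar):

#include <stdint.h>

/* Fill planar YUV420P-style buffers with a moving gradient test pattern. */
static void fill_yuv_pattern(uint8_t *y, uint8_t *u, uint8_t *v,
                             int y_stride, int c_stride,
                             int frame_index, int width, int height)
{
    int x, row;

    for (row = 0; row < height; row++)          /* luma: full resolution */
        for (x = 0; x < width; x++)
            y[row * y_stride + x] = x + row + frame_index * 3;

    for (row = 0; row < height / 2; row++) {    /* chroma: half resolution */
        for (x = 0; x < width / 2; x++) {
            u[row * c_stride + x] = 128 + row + frame_index * 2;
            v[row * c_stride + x] = 64 + x + frame_index * 5;
        }
    }
}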
-void write_video_frame(AVFormatContext *oc, AVStream *st)
+static void write_video_frame(AVFormatContext *oc, AVStream *st)
{
int out_size, ret;
AVCodecContext *c;
frame_count++;
}
-void close_video(AVFormatContext *oc, AVStream *st)
+static void close_video(AVFormatContext *oc, AVStream *st)
{
avcodec_close(st->codec);
av_free(picture->data[0]);
/** Create a bidirectional pipe for the given command.
*/
-rwpipe *rwpipe_open( int argc, char *argv[] )
+static rwpipe *rwpipe_open( int argc, char *argv[] )
{
rwpipe *this = av_mallocz( sizeof( rwpipe ) );
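The body is truncated above, but the usual way to build such a bidirectional pipe on POSIX is two pipe() calls plus fork(), with the child's stdin/stdout remapped before exec. A minimal sketch under those assumptions (pipe_pair and its functions are illustrative, not the patch's code, and error cleanup is abbreviated):

#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>

typedef struct {            /* stand-in for the patch's rwpipe */
    FILE *reader;           /* parent reads the child's stdout here    */
    FILE *writer;           /* parent writes to the child's stdin here */
    pid_t pid;
} pipe_pair;

/* Spawn argv with both ends of its stdio connected back to the caller. */
static pipe_pair *pipe_pair_open(char *const argv[])
{
    int to_child[2], from_child[2];
    pipe_pair *p = calloc(1, sizeof(*p));

    if (!p || pipe(to_child) < 0 || pipe(from_child) < 0)
        return NULL;

    p->pid = fork();
    if (p->pid < 0)
        return NULL;
    if (p->pid == 0) {                     /* child: remap stdio, then exec */
        dup2(to_child[0], STDIN_FILENO);
        dup2(from_child[1], STDOUT_FILENO);
        close(to_child[1]);
        close(from_child[0]);
        execvp(argv[0], argv);
        _exit(127);
    }
    close(to_child[0]);                    /* parent keeps the other ends */
    close(from_child[1]);
    p->writer = fdopen(to_child[1], "w");
    p->reader = fdopen(from_child[0], "r");
    return p;
}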
/** Read data from the pipe.
*/
-FILE *rwpipe_reader( rwpipe *this )
+static FILE *rwpipe_reader( rwpipe *this )
{
if ( this != NULL )
return this->reader;
/** Write data to the pipe.
*/
-FILE *rwpipe_writer( rwpipe *this )
+static FILE *rwpipe_writer( rwpipe *this )
{
if ( this != NULL )
return this->writer;
/* Read a number from the pipe - assumes PNM style headers.
*/
-int rwpipe_read_number( rwpipe *rw )
+static int rwpipe_read_number( rwpipe *rw )
{
int value = 0;
int c = 0;
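The convention the comment refers to: PNM headers are ASCII numbers separated by whitespace, and a '#' starts a comment that runs to the end of the line. A standalone sketch of such a reader over a plain FILE* (read_pnm_number is my name for it, not the patch's rwpipe_read_number):

#include <ctype.h>
#include <stdio.h>

/* Read one ASCII integer, skipping whitespace and '#'-to-end-of-line comments
 * as the PNM family of headers allows. */
static int read_pnm_number(FILE *in)
{
    int value = 0;
    int c;

    do {
        c = fgetc(in);
        if (c == '#')                       /* comment: discard the rest of the line */
            while (c != EOF && c != '\n')
                c = fgetc(in);
    } while (c != EOF && !isdigit(c));

    while (c != EOF && isdigit(c)) {        /* accumulate decimal digits */
        value = value * 10 + (c - '0');
        c = fgetc(in);
    }
    return value;
}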
/** Read a PPM P6 header.
*/
-int rwpipe_read_ppm_header( rwpipe *rw, int *width, int *height )
+static int rwpipe_read_ppm_header( rwpipe *rw, int *width, int *height )
{
char line[ 3 ];
FILE *in = rwpipe_reader( rw );
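A binary PPM header is just the magic "P6", then width, height and maxval as ASCII numbers, followed immediately by width*height*3 bytes of RGB data. Continuing the previous sketch and reusing its read_pnm_number helper (again illustrative, not the patch's rwpipe_read_ppm_header):

/* Parse a "P6" binary PPM header and return the image dimensions. */
static int read_ppm_header(FILE *in, int *width, int *height)
{
    char magic[3];

    if (fscanf(in, "%2s", magic) != 1 || magic[0] != 'P' || magic[1] != '6')
        return -1;                          /* not a binary PPM */

    *width  = read_pnm_number(in);
    *height = read_pnm_number(in);
    (void)read_pnm_number(in);              /* maxval, usually 255; ignored here */
    return 0;
}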
/** Close the pipe and process.
*/
-void rwpipe_close( rwpipe *this )
+static void rwpipe_close( rwpipe *this )
{
if ( this != NULL )
{