@@ -3,6 +3,7 @@
 * Simple movie writer for vnc; based on Libavformat API example from FFMPEG
 *
 * Copyright (c) 2003 Fabrice Bellard, 2004 Johannes E. Schindelin
 * Updates copyright (c) 2017 Tyrel M. McQueen
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
@@ -25,412 +26,451 @@
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <math.h>
#include <signal.h>
#include <sys/time.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <rfb/rfbclient.h>

#define VNC_PIX_FMT     AV_PIX_FMT_RGB565   /* pixel format generated by VNC client */
#define OUTPUT_PIX_FMT  AV_PIX_FMT_YUV420P  /* default pix_fmt */
/**************************************************************/
/* video output */

static int write_packet(AVFormatContext *oc, const AVRational *time_base, AVStream *st, AVPacket *pkt)
{
    /* rescale output packet timestamp values from codec to stream timebase */
    av_packet_rescale_ts(pkt, *time_base, st->time_base);
    pkt->stream_index = st->index;
    /* Write the compressed frame to the media file. */
    return av_interleaved_write_frame(oc, pkt);
}
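/*
 * Note on the rescale above: the encoder stamps packets in ost->enc->time_base
 * (1/framerate, as configured in add_video_stream() below), while the muxer
 * expects timestamps in st->time_base, which the container may adjust when the
 * header is written. av_packet_rescale_ts() converts pts/dts/duration between
 * the two so av_interleaved_write_frame() can order packets correctly.
 */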
/*************************************************/
/* video functions */

/* a wrapper around a single output video stream */
typedef struct {
    AVStream *st;
    AVCodec *codec;
    AVCodecContext *enc;
    int64_t pts;
    AVFrame *frame;
    AVFrame *tmp_frame;
    struct SwsContext *sws;
} VideoOutputStream;
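/*
 * Rough lifecycle of a VideoOutputStream as used in this file: add_video_stream()
 * selects the encoder and fills in the codec parameters, open_video() opens the
 * codec and allocates frame/tmp_frame/sws, write_video_frame() is called from the
 * main loop, write_final_video_frame() drains the encoder, and
 * close_video_stream() frees everything again.
 */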
/* Add an output video stream. */
int add_video_stream(VideoOutputStream *ost, AVFormatContext *oc,
                     enum AVCodecID codec_id, int64_t br, int sr, int w, int h)
{
    /* find the encoder */
    ost->codec = avcodec_find_encoder(codec_id);
    if (!(ost->codec)) {
        fprintf(stderr, "Could not find encoder for '%s'\n",
                avcodec_get_name(codec_id));
        return -1;
    } // no extra memory allocation from this call
    if (ost->codec->type != AVMEDIA_TYPE_VIDEO) {
        fprintf(stderr, "Encoder for '%s' does not seem to be for video.\n",
                avcodec_get_name(codec_id));
        return -2;
    }

    ost->enc = avcodec_alloc_context3(ost->codec);
    if (!(ost->enc)) {
        fprintf(stderr, "Could not alloc an encoding context\n");
        return -3;
    } // from now on need to call avcodec_free_context(&(ost->enc)) on error

    /* Set codec parameters */
    ost->enc->codec_id = codec_id;
    ost->enc->bit_rate = br;
    /* Resolution must be a multiple of two (round up to avoid buffer overflow). */
    ost->enc->width    = w + (w % 2);
    ost->enc->height   = h + (h % 2);
    /* timebase: This is the fundamental unit of time (in seconds) in terms
     * of which frame timestamps are represented. For fixed-fps content,
     * timebase should be 1/framerate and timestamp increments should be
     * identical to 1. */
    ost->enc->time_base = (AVRational){ 1, sr };
    ost->enc->gop_size  = 12; /* emit one intra frame every twelve frames at most */
    ost->enc->pix_fmt   = OUTPUT_PIX_FMT;
    if (ost->enc->codec_id == AV_CODEC_ID_MPEG1VIDEO) {
        /* Needed to avoid using macroblocks in which some coeffs overflow.
         * This does not happen with normal video, it just happens here as
         * the motion of the chroma plane does not match the luma plane. */
        ost->enc->mb_decision = 2;
    }

    ost->st = avformat_new_stream(oc, ost->codec);
    if (!ost->st) {
        fprintf(stderr, "Could not allocate stream\n");
        avcodec_free_context(&(ost->enc));
        return -4;
    } // stream memory cleared up when oc is freed, so no need to do so later in this function on error
    ost->st->id = oc->nb_streams - 1;
    ost->st->time_base = ost->enc->time_base;
    ost->pts = 0;

    /* Some formats want stream headers to be separate. */
    if (oc->oformat->flags & AVFMT_GLOBALHEADER)
        ost->enc->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;

    // must wait to allocate frame buffers until codec is opened (in case codec changes the PIX_FMT)
    return 0;
}
AVFrame *alloc_picture(enum AVPixelFormat pix_fmt, int width, int height)
{
    AVFrame *picture;
    int ret;

    picture = av_frame_alloc();
    if (!picture)
        return NULL;
    // from now on need to call av_frame_free(&picture) on error

    picture->format = pix_fmt;
    picture->width  = width;
    picture->height = height;

    /* allocate the buffers for the frame data */
    ret = av_frame_get_buffer(picture, 64);
    if (ret < 0) {
        fprintf(stderr, "Could not allocate frame data.\n");
        av_frame_free(&picture);
        return NULL;
    }

    return picture;
} // use av_frame_free(&picture) to free memory from this call
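/*
 * The alignment of 64 passed to av_frame_get_buffer() above is a conservative
 * value for current SIMD requirements; passing 0 instead would let FFmpeg pick
 * a suitable alignment automatically.
 */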
int open_video(AVFormatContext *oc, VideoOutputStream *ost)
{
    int ret;

    /* open the codec */
    ret = avcodec_open2(ost->enc, ost->codec, NULL);
    if (ret < 0) {
        fprintf(stderr, "Could not open video codec: %s\n", av_err2str(ret));
        return ret;
    } // memory from this call freed when oc is freed, no need to do it on error in this call

    /* copy the stream parameters to the muxer */
    ret = avcodec_parameters_from_context(ost->st->codecpar, ost->enc);
    if (ret < 0) {
        fprintf(stderr, "Could not copy the stream parameters.\n");
        return ret;
    } // memory from this call is freed when oc (parent of ost->st) is freed, no need to do it on error in this call

    /* allocate and init a re-usable frame */
    ost->frame = alloc_picture(ost->enc->pix_fmt, ost->enc->width, ost->enc->height);
    if (!(ost->frame)) {
        fprintf(stderr, "Could not allocate video frame\n");
        return -1;
    } // from now on need to call av_frame_free(&(ost->frame)) on error

    /* If the output format is not the same as the VNC format, then a temporary
     * VNC format picture is needed too. It is then converted to the required
     * output format. */
    ost->tmp_frame = NULL;
    ost->sws = NULL;
    if (ost->enc->pix_fmt != VNC_PIX_FMT) {
        ost->tmp_frame = alloc_picture(VNC_PIX_FMT, ost->enc->width, ost->enc->height);
        if (!(ost->tmp_frame)) {
            fprintf(stderr, "Could not allocate temporary picture\n");
            av_frame_free(&(ost->frame));
            return -2;
        } // from now on need to call av_frame_free(&(ost->tmp_frame)) on error
        ost->sws = sws_getCachedContext(ost->sws,
                                        ost->enc->width, ost->enc->height, VNC_PIX_FMT,
                                        ost->enc->width, ost->enc->height, ost->enc->pix_fmt,
                                        0, NULL, NULL, NULL);
        if (!(ost->sws)) {
            fprintf(stderr, "Could not get sws context\n");
            av_frame_free(&(ost->frame));
            av_frame_free(&(ost->tmp_frame));
            return -3;
        } // from now on need to call sws_freeContext(ost->sws); ost->sws = NULL; on error
    }

    return 0;
}
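/*
 * VNC_PIX_FMT is AV_PIX_FMT_RGB565 because main() below configures the client
 * with 5/6/5 red/green/blue shifts, so the VNC library writes RGB565 pixels
 * straight into tmp_frame; the sws context created above converts that to the
 * encoder's pix_fmt (YUV420P by default) before each frame is encoded.
 */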
/*
 * encode current video frame and send it to the muxer
 * return 0 on success, negative on error
 */
int write_video_frame(AVFormatContext *oc, VideoOutputStream *ost, int64_t pts)
{
    int ret, ret2;
    AVPacket pkt = { 0 };

    if (pts <= ost->pts) return 0; // nothing to do

    /* convert format if needed */
    if (ost->tmp_frame) {
        sws_scale(ost->sws, (const uint8_t * const *)ost->tmp_frame->data,
                  ost->tmp_frame->linesize, 0, ost->enc->height, ost->frame->data, ost->frame->linesize);
    }

    /* send the image to the encoder */
    ost->pts = pts;
    ost->frame->pts = ost->pts;
    ret = avcodec_send_frame(ost->enc, ost->frame);
    if (ret < 0) {
        fprintf(stderr, "Error sending video frame to encoder: %s\n", av_err2str(ret));
        return ret;
    }

    /* read all available packets */
    ret2 = 0;
    for (ret = avcodec_receive_packet(ost->enc, &pkt); ret == 0; ret = avcodec_receive_packet(ost->enc, &pkt)) {
        ret2 = write_packet(oc, &(ost->enc->time_base), ost->st, &pkt);
        if (ret2 < 0) {
            fprintf(stderr, "Error while writing video frame: %s\n", av_err2str(ret2));
            /* continue on this error to not gum up encoder */
        }
    }
    if (ret2 < 0) return ret2;
    if (!(ret == AVERROR(EAGAIN))) return ret; // AVERROR(EAGAIN) means all available packets were output and more frames are needed (i.e. success)
    return 0;
}
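/*
 * The loop above follows the FFmpeg send/receive encoding API:
 * avcodec_send_frame() queues one raw frame, then avcodec_receive_packet() is
 * called until it returns AVERROR(EAGAIN), meaning the encoder needs more input
 * before it can emit another packet (or AVERROR_EOF once the encoder has been
 * drained with a NULL frame). Any other negative return value is a real error.
 */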
/*
 * Write final video frame (i.e. drain codec).
 */
int write_final_video_frame(AVFormatContext *oc, VideoOutputStream *ost)
{
    int ret, ret2;
    AVPacket pkt = { 0 };

    /* send NULL image to encoder to start draining */
    ret = avcodec_send_frame(ost->enc, NULL);
    if (ret < 0) {
        fprintf(stderr, "Error sending final video frame to encoder: %s\n", av_err2str(ret));
        return ret;
    }

    /* read all available packets */
    ret2 = 0;
    for (ret = avcodec_receive_packet(ost->enc, &pkt); ret == 0; ret = avcodec_receive_packet(ost->enc, &pkt)) {
        ret2 = write_packet(oc, &(ost->enc->time_base), ost->st, &pkt);
        if (ret2 < 0) {
            fprintf(stderr, "Error while writing final video frame: %s\n", av_err2str(ret2));
            /* continue on this error to not gum up encoder */
        }
    }
    if (ret2 < 0) return ret2;
    if (!(ret == AVERROR_EOF)) return ret; // AVERROR_EOF means the encoder has been fully drained (i.e. success)
    return 0;
}
void close_video_stream(VideoOutputStream *ost)
{
    avcodec_free_context(&(ost->enc));
    av_frame_free(&(ost->frame));
    av_frame_free(&(ost->tmp_frame));
    sws_freeContext(ost->sws); ost->sws = NULL;
    ost->codec = NULL; /* codec not an allocated item */
    ost->st = NULL;    /* freeing parent oc will free this memory */
}
/**************************************************************/
/* Output movie handling */

AVFormatContext *movie_open(char *filename, VideoOutputStream *video_st, int br, int fr, int w, int h) {
    int ret;
    AVFormatContext *oc;

    /* allocate the output media context. */
    ret = avformat_alloc_output_context2(&oc, NULL, NULL, filename);
    if (ret < 0) {
        fprintf(stderr, "Warning: Could not deduce output format from file extension: using MP4.\n");
        ret = avformat_alloc_output_context2(&oc, NULL, "mp4", filename);
    }
    if (ret < 0) {
        fprintf(stderr, "Error: Could not allocate media context: %s.\n", av_err2str(ret));
        return NULL;
    } // from now on, need to call avformat_free_context(oc); oc=NULL; to free memory on error

    /* Add the video stream using the default format codec and initialize the codec. */
    if (oc->oformat->video_codec != AV_CODEC_ID_NONE) {
        ret = add_video_stream(video_st, oc, oc->oformat->video_codec, br, fr, w, h);
    } else {
        ret = -1;
    }
    if (ret < 0) {
        fprintf(stderr, "Error: chosen output format does not have a video codec, or error %i\n", ret);
        avformat_free_context(oc); oc = NULL;
        return NULL;
    } // from now on, need to call close_video_stream(video_st) to free memory on error

    /* Now that all the parameters are set, we can open the codecs and allocate the necessary encode buffers. */
    ret = open_video(oc, video_st);
    if (ret < 0) {
        fprintf(stderr, "Error: error opening video codec, error %i\n", ret);
        close_video_stream(video_st);
        avformat_free_context(oc); oc = NULL;
        return NULL;
    } // no additional calls required to free memory, as close_video_stream(video_st) will do it

    /* open the output file, if needed */
    if (!(oc->oformat->flags & AVFMT_NOFILE)) {
        ret = avio_open(&oc->pb, filename, AVIO_FLAG_WRITE);
        if (ret < 0) {
            fprintf(stderr, "Could not open '%s': %s\n", filename, av_err2str(ret));
            close_video_stream(video_st);
            avformat_free_context(oc); oc = NULL;
            return NULL;
        }
    } // will need to call avio_closep(&oc->pb) to free file handle on error

    /* Write the stream header, if any. */
    ret = avformat_write_header(oc, NULL);
    if (ret < 0) {
        fprintf(stderr, "Error occurred when writing to output file: %s\n", av_err2str(ret));
        if (!(oc->oformat->flags & AVFMT_NOFILE))
            avio_closep(&oc->pb);
        close_video_stream(video_st);
        avformat_free_context(oc); oc = NULL;
    } // no additional items to free

    return oc;
}
void movie_close(AVFormatContext **ocp, VideoOutputStream *video_st) {
    AVFormatContext *oc = *ocp;

    /* Write the trailer, if any. The trailer must be written before you
     * close the CodecContexts open when you wrote the header; otherwise
     * av_write_trailer() may try to use memory that was freed on
     * av_codec_close(). */
    if (oc) {
        if (video_st)
            write_final_video_frame(oc, video_st);

        av_write_trailer(oc);

        /* Close the video codec. */
        close_video_stream(video_st);

        if (!(oc->oformat->flags & AVFMT_NOFILE))
            /* Close the output file. */
            avio_closep(&oc->pb);

        /* free the stream */
        avformat_free_context(oc);
        *ocp = NULL; /* clear the caller's pointer, not just the local copy */
    }
}
/**************************************************************/
/* VNC globals */
VideoOutputStream video_st = { 0 };
rfbClient *client = NULL;
rfbBool quit = FALSE;
char *filename = NULL;
AVFormatContext *oc = NULL;
int bitrate = 1000000;
int framerate = 5;
long max_time = 0;
struct timespec start_time, cur_time;

/* Signal handling */
void signal_handler(int signal) {
    quit = TRUE;
}

/* returns time since start in pts units */
int64_t time_to_pts(int framerate, struct timespec *start_time, struct timespec *cur_time) {
    time_t ds = cur_time->tv_sec - start_time->tv_sec;
    long dns = cur_time->tv_nsec - start_time->tv_nsec;
    /* use usecs */
    int64_t dt = (int64_t)ds * (int64_t)1000000 + (int64_t)dns / (int64_t)1000;
    /* compute rv in units of frame number (rounding to nearest, not truncating) */
    int64_t rv = (((int64_t)framerate) * dt + (int64_t)500000) / (int64_t)(1000000);
    return rv;
}
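/*
 * Worked example (illustrative numbers): with framerate = 5 and 2.3 s elapsed,
 * dt = 2300000 usec and rv = (5*2300000 + 500000)/1000000 = 12, i.e. the frame
 * is stamped as tick 12 of the 1/5 s timebase (nearest-frame rounding).
 */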
/**************************************************************/
/* VNC callback functions */
rfbBool vnc_malloc_fb(rfbClient *client) {
    movie_close(&oc, &video_st);
    oc = movie_open(filename, &video_st, bitrate, framerate, client->width, client->height);
    if (!oc)
        return FALSE;
    signal(SIGTERM, signal_handler);
    signal(SIGINT, signal_handler);
    signal(SIGQUIT, signal_handler);
    signal(SIGABRT, signal_handler);
    /* These assignments assume the AVFrame buffer is contiguous. This is true in current
     * ffmpeg versions for most non-HW accelerated bits, but may not be true globally. */
    if (video_st.tmp_frame)
        client->frameBuffer = video_st.tmp_frame->data[0];
    else
        client->frameBuffer = video_st.frame->data[0];
    return TRUE;
}

void vnc_update(rfbClient *client, int x, int y, int w, int h) {
}
/**************************************************************/
/* media file output */

int main(int argc, char **argv)
{
    int i, j;

    /* Initialize vnc client structure (don't connect yet). */
    client = rfbGetClient(5, 3, 2);
    client->format.redShift = 11;  client->format.redMax = 31;
    client->format.greenShift = 5; client->format.greenMax = 63;
    client->format.blueShift = 0;  client->format.blueMax = 31;

    /* Initialize libavcodec, and register all codecs and formats. */
    av_register_all();
    /* Parse command line. */
    for (i = 1; i < argc; i++) {
        j = i;
        if (argc > i+1 && !strcmp("-o", argv[i])) {
            filename = argv[i+1];
            j += 2;
        } else if (argc > i+1 && !strcmp("-t", argv[i])) {
            max_time = atol(argv[i+1]);
            if (max_time < 10 || max_time > 100000000) {
                fprintf(stderr, "Warning: Nonsensical time-per-file %li, resetting to default.\n", max_time);
                max_time = 0;
            }
            j += 2;
        }
        /* This is so that argc/argv are ready for passing to rfbInitClient */
        if (j > i) {
            argc -= j - i;
            memmove(argv + i, argv + j, (argc - i) * sizeof(char*));
            i--;
        }
    }

    /* default filename. */
    if (!filename) {
        fprintf(stderr, "Warning: No filename specified. Using output.mp4\n");
        filename = "output.mp4";
    }
    /* open VNC connection. */
    client->MallocFrameBuffer = vnc_malloc_fb;
    client->GotFrameBufferUpdate = vnc_update;
    if (!rfbInitClient(client, &argc, argv)) {
        printf("usage: %s [-o output_file] [-t seconds-per-file] server:port\n", argv[0]);
        return 1;
    }
    /* main loop */
    clock_gettime(CLOCK_MONOTONIC, &start_time);
    while (!quit) {
        int i = WaitForMessage(client, 10000/framerate); /* useful for timeout to be no more than 10 msec per second (=10000/framerate usec) */
        if (i > 0) {
            if (!HandleRFBServerMessage(client))
                quit = TRUE;
        } else if (i < 0) {
            quit = TRUE;
        }
        if (!quit) {
            clock_gettime(CLOCK_MONOTONIC, &cur_time);
            write_video_frame(oc, &video_st, time_to_pts(framerate, &start_time, &cur_time));
            if ((cur_time.tv_sec - start_time.tv_sec) > max_time && max_time > 0) {
                quit = TRUE;
            }
        }
    }

    movie_close(&oc, &video_st);
    return 0;
}
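/*
 * Example invocation (illustrative; assumes the binary is named vnc2mpg):
 *
 *     vnc2mpg -o output.mp4 -t 3600 localhost:1
 *
 * records the session on display :1 of localhost to output.mp4 and stops after
 * roughly 3600 seconds.
 */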