-rw-r--r--  noncore/multimedia/opieplayer2/lib.cpp              |  15
-rw-r--r--  noncore/multimedia/opieplayer2/nullvideo.c          | 359
-rw-r--r--  noncore/multimedia/opieplayer2/opieplayer2.pro      |   2
-rw-r--r--  noncore/multimedia/opieplayer2/xinevideowidget.cpp  |   1
4 files changed, 236 insertions(+), 141 deletions(-)
diff --git a/noncore/multimedia/opieplayer2/lib.cpp b/noncore/multimedia/opieplayer2/lib.cpp
index 1ab5c96..248221b 100644
--- a/noncore/multimedia/opieplayer2/lib.cpp
+++ b/noncore/multimedia/opieplayer2/lib.cpp
@@ -45,8 +45,9 @@ using namespace Opie::Core;
 #include <qgfx_qws.h>
 
 /* STD */
 #include <assert.h>
+#include <unistd.h>
 
 typedef void (*display_xine_frame_t) (void *user_data, uint8_t* frame,
                                        int width, int height,int bytes );
 
@@ -107,12 +108,12 @@ Lib::Lib( InitializationMode initMode, XineVideoWidget* widget )
 }
 
 void Lib::run()
 {
-    odebug << "Lib::run() started" << oendl;
+    odebug << "Lib::run() started" << oendl;
     initialize();
     m_initialized = true;
-    odebug << "Lib::run() finished" << oendl;
+    odebug << "Lib::run() finished" << oendl;
 }
 
 void Lib::initialize()
 {
@@ -145,9 +146,9 @@ void Lib::initialize()
     m_queue = xine_event_new_queue (m_stream);
 
     xine_event_create_listener_thread (m_queue, xine_event_handler, this);
 
-    ::null_preload_decoders( m_stream );
+    ::null_preload_decoders( m_stream );
 
     m_duringInitialization = false;
 }
@@ -216,9 +217,9 @@ int Lib::play( const QString& fileName, int startPos, int start_time ) {
 
 void Lib::stop() {
     assert( m_initialized );
 
-    odebug << "<<<<<<<< STOP IN LIB TRIGGERED >>>>>>>" << oendl;
+    odebug << "<<<<<<<< STOP IN LIB TRIGGERED >>>>>>>" << oendl;
     xine_stop( m_stream );
 }
 
 void Lib::pause( bool toggle ) {
@@ -328,11 +329,11 @@ void Lib::ensureInitialized()
 {
     if ( m_initialized )
         return;
 
-    odebug << "waiting for initialization thread to finish" << oendl;
+    odebug << "waiting for initialization thread to finish" << oendl;
     wait();
-    odebug << "initialization thread finished!" << oendl;
+    odebug << "initialization thread finished!" << oendl;
 }
 
 void Lib::setWidget( XineVideoWidget *widget )
 {
@@ -423,9 +424,9 @@ void Lib::xine_display_frame( void* user_data, uint8_t *frame,
 
 void Lib::drawFrame( uint8_t* frame, int width, int height, int bytes ) {
     assert( m_initialized );
 
     if ( !m_video ) {
-        owarn << "not showing video now" << oendl;
+        owarn << "not showing video now" << oendl;
        return;
     }
 
     assert( m_wid );
diff --git a/noncore/multimedia/opieplayer2/nullvideo.c b/noncore/multimedia/opieplayer2/nullvideo.c
index 378bbd4..6769a37 100644
--- a/noncore/multimedia/opieplayer2/nullvideo.c
+++ b/noncore/multimedia/opieplayer2/nullvideo.c
@@ -3,8 +3,9 @@
                  Copyright (c) 2002 Max Reiss <harlekin@handhelds.org>
                  Copyright (c) 2002 LJP <>
                  Copyright (c) 2002 Holger Freyther <zecke@handhelds.org>
+                 Copyright (c) 2002-2003 Miguel Freitas of xine
               =.
             .=l.
            .>+-=
 _;:,     .>    :=|.         This program is free software; you can
@@ -112,9 +113,9 @@ static uint32_t null_get_capabilities( vo_driver_t *self ){
   null_driver_t* this = (null_driver_t*)self;
   return this->m_capabilities;
 }
 
-static void null_frame_copy (vo_frame_t *vo_img, uint8_t **src) {
+static void null_frame_proc_slice (vo_frame_t *vo_img, uint8_t **src) {
   opie_frame_t *frame = (opie_frame_t *) vo_img ;
 
   vo_img->proc_called = 1;
 
@@ -174,20 +175,19 @@ static vo_frame_t* null_alloc_frame( vo_driver_t* self ){
 #ifdef LOG
   fprintf (stderr, "nullvideo: alloc_frame\n");
 #endif
 
-  frame = (opie_frame_t*)malloc ( sizeof(opie_frame_t) );
+  frame = (opie_frame_t*)xine_xmalloc ( sizeof(opie_frame_t) );
 
-  memset( frame, 0, sizeof( opie_frame_t) );
   memcpy (&frame->sc, &this->sc, sizeof(vo_scale_t));
 
   pthread_mutex_init (&frame->frame.mutex, NULL);
 
   frame->output = this;
 
   /* initialize the frame*/
   frame->frame.driver  = self;
-  frame->frame.proc_slice = null_frame_copy;
+  frame->frame.proc_slice = null_frame_proc_slice;
   frame->frame.field   = null_frame_field;
   frame->frame.dispose = null_frame_dispose;
 
   /*
@@ -197,53 +197,36 @@ static vo_frame_t* null_alloc_frame( vo_driver_t* self ){
 
   return (vo_frame_t*) frame;
 }
 
-static void null_update_frame_format( vo_driver_t* self, vo_frame_t* img,
-                                      uint32_t width, uint32_t height,
-                                      double ratio_code, int format,
-                                      int flags ){
-  null_driver_t* this = (null_driver_t*) self;
-  opie_frame_t* frame = (opie_frame_t*)img;
-  /* not needed now */
-#ifdef LOG
-  fprintf (stderr, "nullvideo: update_frame_format\n");
-#endif
+static void null_frame_compute_ideal_size( null_driver_t *this,
+                                           opie_frame_t *frame ) {
+  this = this;
 
-  flags &= VO_BOTH_FIELDS;
+  _x_vo_scale_compute_ideal_size(&frame->sc);
+}
 
-  /* find out if we need to adapt this frame */
+static void null_frame_compute_rgb_size( null_driver_t *this,
+                                         opie_frame_t *frame ){
+  this = this;
 
-  if ((width != frame->sc.delivered_width)
-      || (height != frame->sc.delivered_height)
-      || (ratio_code != frame->sc.delivered_ratio)
-      || (flags != frame->flags)
-      || (format != frame->format)
-      || (this->sc.user_ratio != frame->sc.user_ratio)
-      || (this->gui_width != frame->sc.gui_width)
-      || (this->gui_height != frame->sc.gui_height)) {
+  _x_vo_scale_compute_output_size(&frame->sc);
 
-    frame->sc.delivered_width  = width;
-    frame->sc.delivered_height = height;
-    frame->sc.delivered_ratio  = ratio_code;
-    frame->flags               = flags;
-    frame->format              = format;
-    frame->sc.user_ratio       = this->sc.user_ratio;
-    frame->sc.gui_width        = this->gui_width;
-    frame->sc.gui_height       = this->gui_height;
-    frame->sc.gui_pixel_aspect = 1.0;
+  /* avoid problems in yuv2rgb */
+  if(frame->sc.output_height < (frame->sc.delivered_height+15) >> 4)
+    frame->sc.output_height = (frame->sc.delivered_height+15) >> 4;
 
-    vo_scale_compute_ideal_size ( &frame->sc );
-    vo_scale_compute_output_size( &frame->sc );
+  if (frame->sc.output_width < 8)
+    frame->sc.output_width = 8;
 
-#ifdef LOG
-    fprintf (stderr, "nullvideo: gui %dx%d delivered %dx%d output %dx%d\n",
-             frame->sc.gui_width, frame->sc.gui_height,
-             frame->sc.delivered_width, frame->sc.delivered_height,
-             frame->sc.output_width, frame->sc.output_height);
-#endif
+  /* yuv2rgb_mlib needs an even YUV2 width */
+  if (frame->sc.output_width & 1)
+    frame->sc.output_width++;
+}
 
+static void null_frame_reallocate( null_driver_t *this, opie_frame_t *frame,
+                                   uint32_t width, uint32_t height, int format){
   /*
    * (re-) allocate
    */
   if( frame->data ) {
@@ -261,9 +244,9 @@ static void null_update_frame_format( vo_driver_t* self, vo_frame_t* img,
     }
     free ( frame->data );
   }
 
-  frame->data = xine_xmalloc (frame->sc.output_width
+  frame->data = xine_xmalloc (frame->sc.output_width
                               * frame->sc.output_height
                               * this->bytes_per_pixel );
 
   if( format == XINE_IMGFMT_YV12 ) {
@@ -282,65 +265,118 @@ static void null_update_frame_format( vo_driver_t* self, vo_frame_t* img,
     frame->chunk[1] = NULL;
     frame->chunk[2] = NULL;
   }
+}
+
+static void null_setup_colorspace_converter(opie_frame_t *frame, int flags ) {
+  switch (flags) {
+    case VO_TOP_FIELD:
+    case VO_BOTTOM_FIELD:
+      frame->yuv2rgb->configure (frame->yuv2rgb,
+                                 frame->sc.delivered_width,
+                                 16,
+                                 2*frame->frame.pitches[0],
+                                 2*frame->frame.pitches[1],
+                                 frame->sc.output_width,
+                                 frame->stripe_height,
+                                 frame->bytes_per_line*2);
+      frame->yuv_stride = frame->bytes_per_line*2;
+      break;
+    case VO_BOTH_FIELDS:
+      frame->yuv2rgb->configure (frame->yuv2rgb,
+                                 frame->sc.delivered_width,
+                                 16,
+                                 frame->frame.pitches[0],
+                                 frame->frame.pitches[1],
+                                 frame->sc.output_width,
+                                 frame->stripe_height,
+                                 frame->bytes_per_line);
+      frame->yuv_stride = frame->bytes_per_line;
+      break;
+  }
+#ifdef LOG
+  fprintf (stderr, "nullvideo: colorspace converter configured.\n");
+#endif
+}
+
+static void null_update_frame_format( vo_driver_t* self, vo_frame_t* img,
+                                      uint32_t width, uint32_t height,
+                                      double ratio_code, int format,
+                                      int flags ){
+  null_driver_t* this = (null_driver_t*) self;
+  opie_frame_t* frame = (opie_frame_t*)img;
+
+#ifdef LOG
+  fprintf (stderr, "nullvideo: update_frame_format\n");
+#endif
+
+  flags &= VO_BOTH_FIELDS;
+
+  /* find out if we need to adapt this frame */
+
+  if ((width != frame->sc.delivered_width)
+      || (height != frame->sc.delivered_height)
+      || (ratio_code != frame->sc.delivered_ratio)
+      || (flags != frame->flags)
+      || (format != frame->format)
+      || (this->sc.user_ratio != frame->sc.user_ratio)
+      || (this->gui_width != frame->sc.gui_width)
+      || (this->gui_height != frame->sc.gui_height)) {
+
+    frame->sc.delivered_width  = width;
+    frame->sc.delivered_height = height;
+    frame->sc.delivered_ratio  = ratio_code;
+    frame->flags               = flags;
+    frame->format              = format;
+    frame->sc.user_ratio       = this->sc.user_ratio;
+    frame->sc.gui_width        = this->gui_width;
+    frame->sc.gui_height       = this->gui_height;
+    frame->sc.gui_pixel_aspect = 1.0;
+
+
+    null_frame_compute_ideal_size(this, frame);
+    null_frame_compute_rgb_size(this, frame);
+    null_frame_reallocate(this, frame, width, height, format);
+
+#ifdef LOG
+    fprintf (stderr, "nullvideo: gui %dx%d delivered %dx%d output %dx%d\n",
+             frame->sc.gui_width, frame->sc.gui_height,
+             frame->sc.delivered_width, frame->sc.delivered_height,
+             frame->sc.output_width, frame->sc.output_height);
+#endif
+
+
     frame->stripe_height = 16 * frame->sc.output_height / frame->sc.delivered_height;
     frame->bytes_per_line = frame->sc.output_width * this->bytes_per_pixel;
 
     /*
     * set up colorspace converter
     */
+    null_setup_colorspace_converter(frame, flags);
-    switch (flags) {
-    case VO_TOP_FIELD:
-    case VO_BOTTOM_FIELD:
-      frame->yuv2rgb->configure (frame->yuv2rgb,
-                                 frame->sc.delivered_width,
-                                 16,
-                                 2*frame->frame.pitches[0],
-                                 2*frame->frame.pitches[1],
-                                 frame->sc.output_width,
-                                 frame->stripe_height,
-                                 frame->bytes_per_line*2);
-      frame->yuv_stride = frame->bytes_per_line*2;
-      break;
-    case VO_BOTH_FIELDS:
-      frame->yuv2rgb->configure (frame->yuv2rgb,
-                                 frame->sc.delivered_width,
-                                 16,
-                                 frame->frame.pitches[0],
-                                 frame->frame.pitches[1],
-                                 frame->sc.output_width,
-                                 frame->stripe_height,
-                                 frame->bytes_per_line);
-      frame->yuv_stride = frame->bytes_per_line;
-      break;
-    }
-#ifdef LOG
-    fprintf (stderr, "nullvideo: colorspace converter configured.\n");
-#endif
   }
 
-
   /*
   * reset dest pointers
   */
 
-  if (frame->data) {
-    switch (flags) {
-    case VO_TOP_FIELD:
-      frame->rgb_dst    = (uint8_t *)frame->data;
-      frame->stripe_inc = 2 * frame->stripe_height * frame->bytes_per_line;
-      break;
-    case VO_BOTTOM_FIELD:
-      frame->rgb_dst    = (uint8_t *)frame->data + frame->bytes_per_line ;
-      frame->stripe_inc = 2 * frame->stripe_height * frame->bytes_per_line;
-      break;
-    case VO_BOTH_FIELDS:
-      frame->rgb_dst    = (uint8_t *)frame->data;
-      frame->stripe_inc = frame->stripe_height * frame->bytes_per_line;
-      break;
+  if (frame->data) {
+    switch (flags) {
+      case VO_TOP_FIELD:
+        frame->rgb_dst    = (uint8_t *)frame->data;
+        frame->stripe_inc = 2 * frame->stripe_height * frame->bytes_per_line;
+        break;
+      case VO_BOTTOM_FIELD:
+        frame->rgb_dst    = (uint8_t *)frame->data + frame->bytes_per_line ;
+        frame->stripe_inc = 2 * frame->stripe_height * frame->bytes_per_line;
        break;
+      case VO_BOTH_FIELDS:
+        frame->rgb_dst    = (uint8_t *)frame->data;
+        frame->stripe_inc = frame->stripe_height * frame->bytes_per_line;
+        break;
+    }
   }
-  }
 }
 
 static void null_display_frame( vo_driver_t* self, vo_frame_t *frame_gen ){
   null_driver_t* this = (null_driver_t*) self;
@@ -363,16 +399,20 @@ static void null_display_frame( vo_driver_t* self, vo_frame_t *frame_gen ){
 
 /* blending related */
 static void null_overlay_clut_yuv2rgb (null_driver_t *this,
                                        vo_overlay_t *overlay, opie_frame_t *frame) {
+  this = this;
+
+
   int i;
   clut_t* clut = (clut_t*) overlay->color;
   if (!overlay->rgb_clut) {
     for (i = 0; i < sizeof(overlay->color)/sizeof(overlay->color[0]); i++) {
       *((uint32_t *)&clut[i]) =
-        frame->yuv2rgb->yuv2rgb_single_pixel_fun (frame->yuv2rgb,
-                                                  clut[i].y, clut[i].cb,
-                                                  clut[i].cr);
+        frame->yuv2rgb->
+        yuv2rgb_single_pixel_fun (frame->yuv2rgb,
+                                  clut[i].y, clut[i].cb,
+                                  clut[i].cr);
     }
     overlay->rgb_clut++;
   }
   if (!overlay->clip_rgb_clut) {
@@ -400,72 +440,135 @@ static void null_overlay_blend ( vo_driver_t *this_gen, vo_frame_t *frame_gen,
     if( !overlay->rgb_clut || !overlay->clip_rgb_clut)
       null_overlay_clut_yuv2rgb(this,overlay,frame);
 
     switch(this->bpp) {
-    case 16:
-      blend_rgb16( (uint8_t *)frame->data, overlay,
-                   frame->sc.output_width, frame->sc.output_height,
-                   frame->sc.delivered_width, frame->sc.delivered_height);
-      break;
-    case 24:
-      blend_rgb24( (uint8_t *)frame->data, overlay,
-                   frame->sc.output_width, frame->sc.output_height,
-                   frame->sc.delivered_width, frame->sc.delivered_height);
-      break;
-    case 32:
-      blend_rgb32( (uint8_t *)frame->data, overlay,
-                   frame->sc.output_width, frame->sc.output_height,
-                   frame->sc.delivered_width, frame->sc.delivered_height);
-      break;
-    default:
-      /* It should never get here */
-      break;
+      case 16:
+        blend_rgb16((uint8_t *)frame->data,
+                    overlay,
+                    frame->sc.output_width,
+                    frame->sc.output_height,
+                    frame->sc.delivered_width,
+                    frame->sc.delivered_height);
+        break;
+      case 24:
+        blend_rgb24((uint8_t *)frame->data,
+                    overlay,
+                    frame->sc.output_width,
+                    frame->sc.output_height,
+                    frame->sc.delivered_width,
+                    frame->sc.delivered_height);
+        break;
+      case 32:
+        blend_rgb32((uint8_t *)frame->data,
+                    overlay,
+                    frame->sc.output_width,
+                    frame->sc.output_height,
+                    frame->sc.delivered_width,
+                    frame->sc.delivered_height);
+        break;
+      default:
+        /* It should never get here */
+        break;
     }
   }
 }
 
-static int null_get_property( vo_driver_t* self,
-                              int property ){
+static int null_get_property( vo_driver_t* self, int property ){
+#if 0
+  null_driver_t *this = (null_driver_t *)self;
+
+  switch(property)
+  {
+    case VO_PROP_ASPECT_RATIO:
+      return this->sc.user_ratio;
+    case VO_PROP_BRIGHTNESS:
+      return this->yuv2rgb_brightness;
+    case VO_PROP_WINDOW_WIDTH:
+      return this->sc.gui_width;
+    case VO_PROP_WINDOW_HEIGHT:
+      return this->sc.gui_height;
+    default:
+      break;
+  }
+#else
+  property = property;
+  self = self;
+#endif
+
   return 0;
 }
 
-static int null_set_property( vo_driver_t* self,
-                              int property,
+static int null_set_property( vo_driver_t* self, int property,
                               int value ){
+#if 0
+  null_driver_t *this = (null_driver_t *)self;
+
+  switch(property)
+  {
+    case VO_PROP_ASPECT_RATIO:
+      if(value>=XINE_VO_ASPECT_NUM_RATIOS)
+        value = XINE_VO_ASPECT_AUTO;
+      this->sc.user_ratio = value;
+      break;
+    case VO_PROP_BRIGHTNESS:
+      this->yuv2rgb_brightness = value;
+      this->yuv2rgb_factory->
+        set_csc_levels(this->yuv2rgb_factory, value, 128, 128);
+      break;
+    default:
+      break;
+  }
+#else
+  self = self;
+  property = property;
+#endif
+
   return value;
 }
 
 static void null_get_property_min_max( vo_driver_t* self,
                                        int property, int *min,
                                        int *max ){
+  self = self;
+  property = property;
+
   *max = 0;
   *min = 0;
 }
 
 static int null_gui_data_exchange( vo_driver_t* self, int data_type,
                                    void *data ){
+  self = self;
+  data_type = data_type;
+  data = data;
+
   return 0;
 }
 
 static void null_dispose ( vo_driver_t* self ){
   null_driver_t* this = (null_driver_t*)self;
   free ( this );
 }
 
 static int null_redraw_needed( vo_driver_t* self ){
-  return 0;
+  self = self;
+
+  return 0;
 }
 
-xine_vo_driver_t* init_video_out_plugin( xine_t *xine,
+xine_video_port_t* init_video_out_plugin( xine_t *xine,
                                          void* video,
                                          display_xine_frame_t frameDisplayFunc,
                                          void *userData ){
+  video = video;
+
+
   null_driver_t *vo;
 
   vo = (null_driver_t*)malloc( sizeof(null_driver_t ) );
 
  /* memset? */
   memset(vo,0, sizeof(null_driver_t ) );
 
-  vo_scale_init (&vo->sc, 0, 0, xine->config);
+  _x_vo_scale_init (&vo->sc, 0, 0, xine->config);
 
   vo->sc.gui_pixel_aspect = 1.0;
   vo->m_show_video = 0; // false
@@ -490,31 +593,18 @@ xine_vo_driver_t* init_video_out_plugin( xine_t *xine,
   vo->vo_driver.redraw_needed          = null_redraw_needed;
 
   /* capabilities */
-  vo->m_capabilities = /* VO_CAP_COPIES_IMAGE | */ VO_CAP_YUY2 | VO_CAP_YV12;
+  vo->m_capabilities = VO_CAP_YUY2 | VO_CAP_YV12;
 
  vo->yuv2rgb_factory = yuv2rgb_factory_init (MODE_16_RGB, vo->yuv2rgb_swap,
                                              vo->yuv2rgb_cmap);
 
  vo->caller = userData;
  vo->frameDis = frameDisplayFunc;
 
- /* return ( vo_driver_t*) vo; */
- return vo_new_port( xine, ( vo_driver_t* )vo, 0 );
+  return _x_vo_new_port(xine, &vo->vo_driver, 0);
 }
 
-#if 0
-static vo_info_t vo_info_null = {
-  5,
-  XINE_VISUAL_TYPE_FB
-};
-
-vo_info_t *get_video_out_plugin_info(){
-  vo_info_null.description = ("xine video output plugin using null device");
-  return &vo_info_null;
-}
-
-#endif
 
 /* this is special for this device */
 /**
  * We know that we will be controled by the XINE LIB++
@@ -563,10 +653,9 @@ void null_set_mode( xine_vo_driver_t* self, int depth, int rgb ) {
   this->bytes_per_pixel = (depth + 7 ) / 8;
   this->bpp = this->bytes_per_pixel * 8;
   this->depth = depth;
-  printf("depth %d %d\n", depth, this->bpp);
-  printf("pixeltype %d\n", rgb );
+
   switch ( this->depth ) {
   case 32:
     if( rgb == 0 )
       this->yuv2rgb_mode = MODE_32_RGB;
@@ -627,16 +716,20 @@ void null_preload_decoders( xine_stream_t *stream )
     static const uint32_t preloadedVideoDecoders[] = { BUF_VIDEO_MPEG, BUF_VIDEO_MPEG4, BUF_VIDEO_DIVX5 };
     static const uint8_t preloadedVideoDecoderCount = sizeof( preloadedVideoDecoders ) / sizeof( preloadedVideoDecoders[ 0 ] );
 
     uint8_t i;
+#if 0
     for ( i = 0; i < preloadedAudioDecoderCount; ++i ) {
-        audio_decoder_t *decoder = get_audio_decoder( stream, ( preloadedAudioDecoders[ i ] >> 16 ) & 0xff );
+        audio_decoder_t *decoder = get_audio_decoder( stream, ( preloadedAudioDecoders[ i ] >> 16 ) & 0xff );
+        decoder = decoder;
         /* free_audio_decoder( stream, decoder ); */
     }
 
     for ( i = 0; i < preloadedVideoDecoderCount; ++i ) {
-        video_decoder_t *decoder = get_video_decoder( stream, ( preloadedVideoDecoders[ i ] >> 16 ) & 0xff );
+        video_decoder_t *decoder = get_video_decoder( stream, ( preloadedVideoDecoders[ i ] >> 16 ) & 0xff );
+        decoder = decoder;
         /* free_video_decoder( stream, decoder ); */
     }
+#endif
 }
diff --git a/noncore/multimedia/opieplayer2/opieplayer2.pro b/noncore/multimedia/opieplayer2/opieplayer2.pro
index b9a8d6d..8166658 100644
--- a/noncore/multimedia/opieplayer2/opieplayer2.pro
+++ b/noncore/multimedia/opieplayer2/opieplayer2.pro
@@ -14,9 +14,9 @@ SOURCES = main.cpp \
                   skin.cpp
 TARGET = opieplayer2
 INCLUDEPATH += $(OPIEDIR)/include
 DEPENDPATH += $(OPIEDIR)/include
-LIBS += -lqpe -lpthread -lopiecore2 -lopieui2 -lxine -lstdc++
+LIBS += -lqpe -lpthread -lopiecore2 -lopieui2 -lqtaux2 -lxine
 MOC_DIR = qpeobj
 OBJECTS_DIR = qpeobj
 
 include ( $(OPIEDIR)/include.pro )
diff --git a/noncore/multimedia/opieplayer2/xinevideowidget.cpp b/noncore/multimedia/opieplayer2/xinevideowidget.cpp
index 15c611f..1ac9277 100644
--- a/noncore/multimedia/opieplayer2/xinevideowidget.cpp
+++ b/noncore/multimedia/opieplayer2/xinevideowidget.cpp
@@ -50,8 +50,9 @@
  * This code relies the len be a multiply of 16bit
  */
 static inline void memcpy_rev ( void *_dst, void *_src, size_t len )
 {
+
     /*
      * move the source to the end
      */
     char *src_c = static_cast<char*>(_src) + len;