author | harlekin <harlekin> | 2003-02-07 22:37:50 (UTC) |
---|---|---|
committer | harlekin <harlekin> | 2003-02-07 22:37:50 (UTC) |
commit | 0ea4452337324a98cad7d809a6e06d90447ee8a0 | |
tree | f27fe168fa354e662052132913362b4a267165ff | |
parent | 014608882b23343473c5d0e7dca3ea09cf02dcfb | |
0 not 1
-rw-r--r-- | noncore/multimedia/opieplayer2/nullvideo.c | 2 |
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/noncore/multimedia/opieplayer2/nullvideo.c b/noncore/multimedia/opieplayer2/nullvideo.c
index 3cf3664..f28fba3 100644
--- a/noncore/multimedia/opieplayer2/nullvideo.c
+++ b/noncore/multimedia/opieplayer2/nullvideo.c
@@ -401,193 +401,193 @@ static void null_overlay_blend ( vo_driver_t *this_gen, vo_frame_t *frame_gen, v
    case 16:
      blend_rgb16( (uint8_t *)frame->data, overlay, frame->sc.output_width, frame->sc.output_height, frame->sc.delivered_width, frame->sc.delivered_height);
      break;
    case 24:
      blend_rgb24( (uint8_t *)frame->data, overlay, frame->sc.output_width, frame->sc.output_height, frame->sc.delivered_width, frame->sc.delivered_height);
      break;
    case 32:
      blend_rgb32( (uint8_t *)frame->data, overlay, frame->sc.output_width, frame->sc.output_height, frame->sc.delivered_width, frame->sc.delivered_height);
      break;
    default:
      /* It should never get here */
      break;
    }
  }
}

static int null_get_property( vo_driver_t* self, int property ){
  return 0;
}

static int null_set_property( vo_driver_t* self, int property, int value ){
  return value;
}

static void null_get_property_min_max( vo_driver_t* self, int property, int *min, int *max ){
  *max = 0;
  *min = 0;
}

static int null_gui_data_exchange( vo_driver_t* self, int data_type, void *data ){
  return 0;
}

static void null_dispose ( vo_driver_t* self ){
  null_driver_t* this = (null_driver_t*)self;
  free ( this );
}

static int null_redraw_needed( vo_driver_t* self ){
  return 0;
}

xine_vo_driver_t* init_video_out_plugin( xine_t *xine, void* video, display_xine_frame_t frameDisplayFunc, void *userData ){
  null_driver_t *vo;
  vo = (null_driver_t*)malloc( sizeof(null_driver_t ) );

  /* memset? */
  memset(vo,0, sizeof(null_driver_t ) );

  vo_scale_init (&vo->sc, 0, 0, xine->config);
  vo->sc.gui_pixel_aspect = 1.0;

  vo->m_show_video = 0; // false
  vo->m_video_fullscreen = 0;
  vo->m_is_scaling = 0;
  vo->display_ratio = 1.0;
  vo->gui_width = 16;
  vo->gui_height = 8;
  vo->frameDis = NULL;

  /* install callback handlers*/
  vo->vo_driver.get_capabilities = null_get_capabilities;
  vo->vo_driver.alloc_frame = null_alloc_frame;
  vo->vo_driver.update_frame_format = null_update_frame_format;
  vo->vo_driver.display_frame = null_display_frame;
  vo->vo_driver.overlay_blend = null_overlay_blend;
  vo->vo_driver.get_property = null_get_property;
  vo->vo_driver.set_property = null_set_property;
  vo->vo_driver.get_property_min_max = null_get_property_min_max;
  vo->vo_driver.gui_data_exchange = null_gui_data_exchange;
  vo->vo_driver.dispose = null_dispose;
  vo->vo_driver.redraw_needed = null_redraw_needed;

  /* capabilities */
  vo->m_capabilities = VO_CAP_COPIES_IMAGE | VO_CAP_YUY2 | VO_CAP_YV12;

  vo->yuv2rgb_factory = yuv2rgb_factory_init (MODE_16_RGB, vo->yuv2rgb_swap, vo->yuv2rgb_cmap);

  vo->caller = userData;
  vo->frameDis = frameDisplayFunc;

  /* return ( vo_driver_t*) vo; */
- return vo_new_port( xine, ( vo_driver_t* )vo, 1 );
+ return vo_new_port( xine, ( vo_driver_t* )vo, 0 );
}

#if 0
static vo_info_t vo_info_null = {
  5,
  XINE_VISUAL_TYPE_FB
};

vo_info_t *get_video_out_plugin_info(){
  vo_info_null.description = ("xine video output plugin using null device");
  return &vo_info_null;
}
#endif

/* this is special for this device */
/**
 * We know that we will be controled by the XINE LIB++
 */

/**
 *
 */
int null_is_showing_video( xine_vo_driver_t* self ){
  null_driver_t* this = (null_driver_t*)self->driver;
  return this->m_show_video;
}

void null_set_show_video( xine_vo_driver_t* self, int show ) {
  ((null_driver_t*)self->driver)->m_show_video = show;
}

int null_is_fullscreen( xine_vo_driver_t* self ){
  return ((null_driver_t*)self->driver)->m_video_fullscreen;
}

void null_set_fullscreen( xine_vo_driver_t* self, int screen ){
  ((null_driver_t*)self->driver)->m_video_fullscreen = screen;
}

int null_is_scaling( xine_vo_driver_t* self ){
  return ((null_driver_t*)self->driver)->m_is_scaling;
}

void null_set_videoGamma( xine_vo_driver_t* self , int value ) {
  ((null_driver_t*) self->driver)->yuv2rgb_gamma = value;
  ((null_driver_t*) self->driver)->yuv2rgb_factory->set_gamma( ((null_driver_t*) self->driver)->yuv2rgb_factory, value );
}

void null_set_scaling( xine_vo_driver_t* self, int scale ) {
  ((null_driver_t*)self->driver)->m_is_scaling = scale;
}

void null_set_gui_width( xine_vo_driver_t* self, int width ) {
  ((null_driver_t*)self->driver)->gui_width = width;
}

void null_set_gui_height( xine_vo_driver_t* self, int height ) {
  ((null_driver_t*)self->driver)->gui_height = height;
}

void null_set_mode( xine_vo_driver_t* self, int depth, int rgb ) {
  null_driver_t* this = (null_driver_t*)self->driver;

  this->bytes_per_pixel = (depth + 7 ) / 8;
  this->bpp = this->bytes_per_pixel * 8;
  this->depth = depth;
  printf("depth %d %d\n", depth, this->bpp);
  printf("pixeltype %d\n", rgb );

  switch ( this->depth ) {
  case 32:
    if( rgb == 0 )
      this->yuv2rgb_mode = MODE_32_RGB;
    else
      this->yuv2rgb_mode = MODE_32_BGR;
  case 24:
    if( this->bpp == 32 ) {
      if( rgb == 0 ) {
        this->yuv2rgb_mode = MODE_32_RGB;
      } else {
        this->yuv2rgb_mode = MODE_32_BGR;
      }
    }else{
      if( rgb == 0 )
        this->yuv2rgb_mode = MODE_24_RGB;
      else
        this->yuv2rgb_mode = MODE_24_BGR;
    };
    break;
  case 16:
    if( rgb == 0 ) {
      this->yuv2rgb_mode = MODE_16_RGB;
    } else {
      this->yuv2rgb_mode = MODE_16_BGR;
    }
    break;
  case 15:
    if( rgb == 0 ) {
      this->yuv2rgb_mode = MODE_15_RGB;
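For context on the one-line change: in the xine-lib sources of this era, the third argument to `vo_new_port()` is the grab-only flag, so passing 0 registers the null driver as a regular video-out port (one that runs the normal display path) rather than a grab-only port. Below is a minimal sketch of how a frontend might create and configure this port through the helpers shown in the diff. It assumes the xine-lib headers of that time (where the port type is still called `xine_vo_driver_t`); the callback name `my_display_frame`, the spelled-out `display_xine_frame_t` typedef, and the whole `main()` scaffold are illustrative assumptions, not code from opieplayer2.

```c
#include <stdint.h>
#include <xine.h>

/* Assumed signature for display_xine_frame_t; the real typedef
 * lives in the opieplayer2 headers. */
typedef void (*display_xine_frame_t)( void *user_data, uint8_t *frame,
                                      int width, int height,
                                      int bytes_per_line_frame );

/* Prototypes as they appear in the diff above. */
xine_vo_driver_t *init_video_out_plugin( xine_t *xine, void *video,
                                         display_xine_frame_t frameDisplayFunc,
                                         void *userData );
void null_set_mode( xine_vo_driver_t *self, int depth, int rgb );
void null_set_gui_width( xine_vo_driver_t *self, int width );
void null_set_gui_height( xine_vo_driver_t *self, int height );
void null_set_show_video( xine_vo_driver_t *self, int show );

/* Hypothetical frame handler: a real frontend would blit the rendered
 * RGB frame into its own widget here. */
static void my_display_frame( void *user_data, uint8_t *frame,
                              int width, int height, int bytes_per_line_frame )
{
    (void)user_data; (void)frame; (void)width;
    (void)height; (void)bytes_per_line_frame;
}

int main( void )
{
    xine_t *xine = xine_new();
    xine_init( xine );

    /* init_video_out_plugin() now ends in
     *   vo_new_port( xine, (vo_driver_t*)vo, 0 );
     * so the returned port behaves like any other xine video-out port. */
    xine_vo_driver_t *port =
        init_video_out_plugin( xine, NULL, my_display_frame, NULL );

    null_set_mode( port, 16, 0 );      /* 16 bpp, RGB byte order */
    null_set_gui_width( port, 240 );   /* e.g. a 240x320 PDA screen */
    null_set_gui_height( port, 320 );
    null_set_show_video( port, 1 );

    /* ... open and play a stream on this port ... */

    xine_exit( xine );
    return 0;
}
```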