author | llornkcor <llornkcor> | 2002-11-10 19:01:52 (UTC)
---|---|---
committer | llornkcor <llornkcor> | 2002-11-10 19:01:52 (UTC)
commit | 7c012ee8cd16d8befacc6f6750711443fac0fd5e |
tree | daa73f288c2dc74fda7a64822649f8bd81724aee |
parent | 6078687cb1e389751503ae171ed1bea72846a4de |
*** empty log message ***
-rw-r--r-- | noncore/multimedia/opieplayer2/audiowidget.cpp | 9
-rw-r--r-- | noncore/multimedia/opieplayer2/audiowidget.h | 20
-rw-r--r-- | noncore/multimedia/opieplayer2/nullvideo.c | 142
-rw-r--r-- | noncore/multimedia/opieplayer2/playlistwidget.cpp | 5
4 files changed, 81 insertions, 95 deletions
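Among the changes below, audiowidget.h drops the player's hand-rolled `Ticker` class and declares `songInfo` as an `OTicker` from libopie (`<opie/oticker.h>`) instead; the only member the surrounding code relies on is `setText()`, which `AudioWidget::setTickerText()` forwards to. A minimal sketch of driving the replacement widget on its own follows — the `QApplication` scaffolding and the parent-pointer constructor are assumptions, only `OTicker::setText()` is taken from the diff:

```cpp
// Sketch only: exercises OTicker the way AudioWidget::setTickerText() does.
// Assumes the Opie-era Qt 2 API (QApplication::setMainWidget) and an
// OTicker( QWidget* parent ) constructor; setText() is the call used in the diff.
#include <qapplication.h>
#include <opie/oticker.h>

int main( int argc, char** argv ) {
    QApplication app( argc, argv );

    OTicker ticker( 0 );                        // scrolling single-line display
    ticker.setText( "Artist - Title (3:42)" );  // same call setTickerText() forwards to

    app.setMainWidget( &ticker );
    ticker.show();
    return app.exec();
}
```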
diff --git a/noncore/multimedia/opieplayer2/audiowidget.cpp b/noncore/multimedia/opieplayer2/audiowidget.cpp index e21b0b1..6ccf206 100644 --- a/noncore/multimedia/opieplayer2/audiowidget.cpp +++ b/noncore/multimedia/opieplayer2/audiowidget.cpp @@ -138,28 +138,33 @@ AudioWidget::AudioWidget(QWidget* parent, const char* name, WFlags f) : // changeTextColor( &songInfo ); // songInfo.setBackgroundColor( QColor( 167, 212, 167 )); // songInfo.setFrameStyle( QFrame::NoFrame); songInfo.setFrameStyle( QFrame::WinPanel | QFrame::Sunken ); // songInfo.setForegroundColor(Qt::white); slider.setFixedHeight( 20 ); slider.setMinValue( 0 ); slider.setMaxValue( 1 ); slider.setFocusPolicy( QWidget::NoFocus ); slider.setBackgroundPixmap( *pixBg ); +// Config cofg("qpe"); +// cofg.setGroup("Appearance"); +// QColor backgroundcolor = QColor( cofg.readEntry( "Background", "#E5E1D5" ) ); + time.setFocusPolicy( QWidget::NoFocus ); time.setAlignment( Qt::AlignCenter ); - time.setFrame(FALSE); - changeTextColor( &time ); + +// time.setFrame(FALSE); +// changeTextColor( &time ); resizeEvent( NULL ); connect( mediaPlayerState, SIGNAL( lengthChanged(long) ), this, SLOT( setLength(long) ) ); connect( mediaPlayerState, SIGNAL( viewChanged(char) ), this, SLOT( setView(char) ) ); connect( mediaPlayerState, SIGNAL( loopingToggled(bool) ), this, SLOT( setLooping(bool) ) ); connect( mediaPlayerState, SIGNAL( playingToggled(bool) ), this, SLOT( setPlaying(bool) ) ); connect( mediaPlayerState, SIGNAL( isSeekableToggled( bool ) ), this, SLOT( setSeekable( bool ) ) ); connect( this, SIGNAL( forwardClicked() ), this, SLOT( skipFor() ) ); connect( this, SIGNAL( backClicked() ), this, SLOT( skipBack() ) ); connect( this, SIGNAL( forwardReleased() ), this, SLOT( stopSkip() ) ); diff --git a/noncore/multimedia/opieplayer2/audiowidget.h b/noncore/multimedia/opieplayer2/audiowidget.h index bcd941e..7cb1d79 100644 --- a/noncore/multimedia/opieplayer2/audiowidget.h +++ b/noncore/multimedia/opieplayer2/audiowidget.h @@ -34,60 +34,44 @@ #ifndef AUDIO_WIDGET_H #define AUDIO_WIDGET_H #include <qwidget.h> #include <qpainter.h> #include <qdrawutil.h> #include <qpixmap.h> #include <qstring.h> #include <qslider.h> #include <qframe.h> #include <qlineedit.h> +#include <opie/oticker.h> class QPixmap; namespace { enum AudioButtons { AudioPlay=0, AudioStop, AudioNext, AudioPrevious, AudioVolumeUp, AudioVolumeDown, AudioLoop, AudioPlayList, AudioForward, AudioBack }; }; -class Ticker : public QFrame { - Q_OBJECT - -public: - Ticker( QWidget* parent=0 ); - ~Ticker(); - void setText( const QString& text ) ; - -protected: - void timerEvent( QTimerEvent * ); - void drawContents( QPainter *p ); -private: - QString scrollText; - int pos, pixelLen; -}; - - class AudioWidget : public QWidget { Q_OBJECT public: AudioWidget( QWidget* parent=0, const char* name=0, WFlags f=0 ); ~AudioWidget(); void setTickerText( const QString &text ) { songInfo.setText( text ); } public slots: void updateSlider( long, long ); void sliderPressed( ); void sliderReleased( ); void setLooping( bool b) { setToggleButton( AudioLoop, b ); } void setPlaying( bool b) { setToggleButton( AudioPlay, b ); } @@ -129,22 +113,22 @@ private: void paintButton( QPainter *p, int i ); int skipDirection; QString skin; QPixmap *pixBg; QImage *imgUp; QImage *imgDn; QImage *imgButtonMask; QBitmap *masks[10]; QPixmap *buttonPixUp[10]; QPixmap *buttonPixDown[10]; QPixmap *pixmaps[4]; - Ticker songInfo; + OTicker songInfo; QSlider slider; QLineEdit time; int xoff, yoff; bool isStreaming : 1; }; #endif // 
AUDIO_WIDGET_H diff --git a/noncore/multimedia/opieplayer2/nullvideo.c b/noncore/multimedia/opieplayer2/nullvideo.c index 707efeb..9c285a0 100644 --- a/noncore/multimedia/opieplayer2/nullvideo.c +++ b/noncore/multimedia/opieplayer2/nullvideo.c @@ -45,45 +45,45 @@ #include <pthread.h> #include "alphablend.h" #include "yuv2rgb.h" #define printf(x,...) /* #define LOG */ /* the caller for our event draw handler */ typedef void (*display_xine_frame_t) (void *user_data, uint8_t* frame, - int width, int height,int bytes ); + int width, int height,int bytes ); typedef struct null_driver_s null_driver_t; struct null_driver_s { xine_vo_driver_t vo_driver; uint32_t m_capabilities; int m_show_video; int m_video_fullscreen; int m_is_scaling; int depth, bpp, bytes_per_pixel; int yuv2rgb_mode; int yuv2rgb_swap; int yuv2rgb_gamma; uint8_t *yuv2rgb_cmap; yuv2rgb_factory_t *yuv2rgb_factory; vo_overlay_t *overlay; - vo_scale_t sc; + vo_scale_t sc; int gui_width; int gui_height; int gui_changed; double display_ratio; void* caller; display_xine_frame_t frameDis; }; typedef struct opie_frame_s opie_frame_t; struct opie_frame_s { @@ -113,30 +113,30 @@ static uint32_t null_get_capabilities( xine_vo_driver_t *self ){ } static void null_frame_copy (vo_frame_t *vo_img, uint8_t **src) { opie_frame_t *frame = (opie_frame_t *) vo_img ; if (!frame->output->m_show_video) { /* printf("nullvideo: no video\n"); */ return; } if (frame->format == XINE_IMGFMT_YV12) { frame->yuv2rgb->yuv2rgb_fun (frame->yuv2rgb, frame->rgb_dst, - src[0], src[1], src[2]); + src[0], src[1], src[2]); } else { frame->yuv2rgb->yuy22rgb_fun (frame->yuv2rgb, frame->rgb_dst, - src[0]); - } + src[0]); + } frame->rgb_dst += frame->stripe_inc; } static void null_frame_field (vo_frame_t *vo_img, int which_field) { opie_frame_t *frame = (opie_frame_t *) vo_img ; switch (which_field) { case VO_TOP_FIELD: frame->rgb_dst = (uint8_t *)frame->data; frame->stripe_inc = 2*frame->stripe_height * frame->bytes_per_line; @@ -187,26 +187,26 @@ static vo_frame_t* null_alloc_frame( xine_vo_driver_t* self ){ frame->frame.field = null_frame_field; frame->frame.dispose = null_frame_dispose; /* * colorspace converter for this frame */ frame->yuv2rgb = this->yuv2rgb_factory->create_converter (this->yuv2rgb_factory); return (vo_frame_t*) frame; } static void null_update_frame_format( xine_vo_driver_t* self, vo_frame_t* img, - uint32_t width, uint32_t height, - int ratio_code, int format, int flags ){ + uint32_t width, uint32_t height, + int ratio_code, int format, int flags ){ null_driver_t* this = (null_driver_t*) self; opie_frame_t* frame = (opie_frame_t*)img; /* not needed now */ #ifdef LOG fprintf (stderr, "nullvideo: update_frame_format\n"); #endif flags &= VO_BOTH_FIELDS; /* find out if we need to adapt this frame */ @@ -223,100 +223,100 @@ static void null_update_frame_format( xine_vo_driver_t* self, vo_frame_t* img, frame->sc.delivered_height = height; frame->sc.delivered_ratio_code = ratio_code; frame->flags = flags; frame->format = format; frame->sc.user_ratio = this->sc.user_ratio; frame->sc.gui_width = this->gui_width; frame->sc.gui_height = this->gui_height; frame->sc.gui_pixel_aspect = 1.0; vo_scale_compute_ideal_size ( &frame->sc ); vo_scale_compute_output_size( &frame->sc ); -#ifdef LOG +#ifdef LOG fprintf (stderr, "nullvideo: gui %dx%d delivered %dx%d output %dx%d\n", - frame->sc.gui_width, frame->sc.gui_height, - frame->sc.delivered_width, frame->sc.delivered_height, - frame->sc.output_width, frame->sc.output_height); + frame->sc.gui_width, 
frame->sc.gui_height, + frame->sc.delivered_width, frame->sc.delivered_height, + frame->sc.output_width, frame->sc.output_height); #endif /* * (re-) allocate */ if( frame->data ) { if( frame->chunk[0] ){ - free( frame->chunk[0] ); - frame->chunk[0] = NULL; - } + free( frame->chunk[0] ); + frame->chunk[0] = NULL; + } if( frame->chunk[1] ){ - free ( frame->chunk[1] ); - frame->chunk[1] = NULL; - } + free ( frame->chunk[1] ); + frame->chunk[1] = NULL; + } if( frame->chunk[2] ){ - free ( frame->chunk[2] ); - frame->chunk[2] = NULL; + free ( frame->chunk[2] ); + frame->chunk[2] = NULL; } free ( frame->data ); } frame->data = xine_xmalloc (frame->sc.output_width - * frame->sc.output_height - * this->bytes_per_pixel ); + * frame->sc.output_height + * this->bytes_per_pixel ); if( format == XINE_IMGFMT_YV12 ) { frame->frame.pitches[0] = 8*((width + 7) / 8); frame->frame.pitches[1] = 8*((width + 15) / 16); frame->frame.pitches[2] = 8*((width + 15) / 16); frame->frame.base[0] = xine_xmalloc_aligned (16, frame->frame.pitches[0] * height,(void **)&frame->chunk[0]); frame->frame.base[1] = xine_xmalloc_aligned (16, frame->frame.pitches[1] * ((height+ 1)/2), (void **)&frame->chunk[1]); frame->frame.base[2] = xine_xmalloc_aligned (16, frame->frame.pitches[2] * ((height+ 1)/2), (void **)&frame->chunk[2]); }else{ frame->frame.pitches[0] = 8*((width + 3) / 4); - + frame->frame.base[0] = xine_xmalloc_aligned (16, frame->frame.pitches[0] * height, - (void **)&frame->chunk[0]); + (void **)&frame->chunk[0]); frame->chunk[1] = NULL; frame->chunk[2] = NULL; } frame->stripe_height = 16 * frame->sc.output_height / frame->sc.delivered_height; frame->bytes_per_line = frame->sc.output_width * this->bytes_per_pixel; /* * set up colorspace converter */ switch (flags) { case VO_TOP_FIELD: case VO_BOTTOM_FIELD: frame->yuv2rgb->configure (frame->yuv2rgb, - frame->sc.delivered_width, - 16, - 2*frame->frame.pitches[0], - 2*frame->frame.pitches[1], - frame->sc.output_width, - frame->stripe_height, - frame->bytes_per_line*2); + frame->sc.delivered_width, + 16, + 2*frame->frame.pitches[0], + 2*frame->frame.pitches[1], + frame->sc.output_width, + frame->stripe_height, + frame->bytes_per_line*2); frame->yuv_stride = frame->bytes_per_line*2; break; case VO_BOTH_FIELDS: frame->yuv2rgb->configure (frame->yuv2rgb, - frame->sc.delivered_width, - 16, - frame->frame.pitches[0], - frame->frame.pitches[1], - frame->sc.output_width, - frame->stripe_height, - frame->bytes_per_line); + frame->sc.delivered_width, + 16, + frame->frame.pitches[0], + frame->frame.pitches[1], + frame->sc.output_width, + frame->stripe_height, + frame->bytes_per_line); frame->yuv_stride = frame->bytes_per_line; break; } #ifdef LOG fprintf (stderr, "nullvideo: colorspace converter configured.\n"); #endif } /* * reset dest pointers */ @@ -339,126 +339,126 @@ static void null_update_frame_format( xine_vo_driver_t* self, vo_frame_t* img, } static void null_display_frame( xine_vo_driver_t* self, vo_frame_t *frame_gen ){ null_driver_t* this = (null_driver_t*) self; opie_frame_t* frame = (opie_frame_t*)frame_gen; display_xine_frame_t display = this->frameDis; if (!this->m_show_video) return; if( display != NULL ) { (*display)(this->caller, frame->data, - frame->sc.output_width, frame->sc.output_height, - frame->bytes_per_line ); + frame->sc.output_width, frame->sc.output_height, + frame->bytes_per_line ); } frame->frame.displayed (&frame->frame); } /* blending related */ static void null_overlay_clut_yuv2rgb (null_driver_t *this, - vo_overlay_t *overlay, - opie_frame_t 
*frame) { + vo_overlay_t *overlay, + opie_frame_t *frame) { int i; clut_t* clut = (clut_t*) overlay->color; if (!overlay->rgb_clut) { for (i = 0; i < sizeof(overlay->color)/sizeof(overlay->color[0]); i++) { *((uint32_t *)&clut[i]) = - frame->yuv2rgb->yuv2rgb_single_pixel_fun (frame->yuv2rgb, - clut[i].y, clut[i].cb, clut[i].cr); + frame->yuv2rgb->yuv2rgb_single_pixel_fun (frame->yuv2rgb, + clut[i].y, clut[i].cb, clut[i].cr); } overlay->rgb_clut++; } if (!overlay->clip_rgb_clut) { clut = (clut_t*) overlay->clip_color; for (i = 0; i < sizeof(overlay->color)/sizeof(overlay->color[0]); i++) { *((uint32_t *)&clut[i]) = - frame->yuv2rgb->yuv2rgb_single_pixel_fun(frame->yuv2rgb, - clut[i].y, clut[i].cb, clut[i].cr); + frame->yuv2rgb->yuv2rgb_single_pixel_fun(frame->yuv2rgb, + clut[i].y, clut[i].cb, clut[i].cr); } overlay->clip_rgb_clut++; } } static void null_overlay_blend ( xine_vo_driver_t *this_gen, vo_frame_t *frame_gen, vo_overlay_t *overlay) { null_driver_t *this = (null_driver_t *) this_gen; opie_frame_t *frame = (opie_frame_t *) frame_gen; if(!this->m_show_video || frame->sc.output_width == 0 || frame->sc.output_height== 0) return; /* Alpha Blend here */ if (overlay->rle) { if( !overlay->rgb_clut || !overlay->clip_rgb_clut) null_overlay_clut_yuv2rgb(this,overlay,frame); switch(this->bpp) { case 16: blend_rgb16( (uint8_t *)frame->data, overlay, - frame->sc.output_width, frame->sc.output_height, - frame->sc.delivered_width, frame->sc.delivered_height); + frame->sc.output_width, frame->sc.output_height, + frame->sc.delivered_width, frame->sc.delivered_height); break; case 24: blend_rgb24( (uint8_t *)frame->data, overlay, - frame->sc.output_width, frame->sc.output_height, - frame->sc.delivered_width, frame->sc.delivered_height); + frame->sc.output_width, frame->sc.output_height, + frame->sc.delivered_width, frame->sc.delivered_height); break; case 32: blend_rgb32( (uint8_t *)frame->data, overlay, - frame->sc.output_width, frame->sc.output_height, - frame->sc.delivered_width, frame->sc.delivered_height); + frame->sc.output_width, frame->sc.output_height, + frame->sc.delivered_width, frame->sc.delivered_height); break; default: /* It should never get here */ break; } } } static int null_get_property( xine_vo_driver_t* self, - int property ){ + int property ){ return 0; } static int null_set_property( xine_vo_driver_t* self, - int property, - int value ){ + int property, + int value ){ return value; } static void null_get_property_min_max( xine_vo_driver_t* self, - int property, int *min, - int *max ){ + int property, int *min, + int *max ){ *max = 0; *min = 0; } static int null_gui_data_exchange( xine_vo_driver_t* self, - int data_type, - void *data ){ + int data_type, + void *data ){ return 0; } static void null_dispose ( xine_vo_driver_t* self ){ null_driver_t* this = (null_driver_t*)self; free ( this ); } static int null_redraw_needed( xine_vo_driver_t* self ){ return 0; } - + xine_vo_driver_t* init_video_out_plugin( config_values_t* conf, - void* video ){ + void* video ){ null_driver_t *vo; vo = (null_driver_t*)malloc( sizeof(null_driver_t ) ); /* memset? 
*/ memset(vo,0, sizeof(null_driver_t ) ); vo_scale_init (&vo->sc, 0, 0); vo->sc.gui_pixel_aspect = 1.0; vo->m_show_video = 0; // false vo->m_video_fullscreen = 0; @@ -476,25 +476,25 @@ xine_vo_driver_t* init_video_out_plugin( config_values_t* conf, vo->vo_driver.overlay_blend = null_overlay_blend; vo->vo_driver.get_property = null_get_property; vo->vo_driver.set_property = null_set_property; vo->vo_driver.get_property_min_max = null_get_property_min_max; vo->vo_driver.gui_data_exchange = null_gui_data_exchange; vo->vo_driver.dispose = null_dispose; vo->vo_driver.redraw_needed = null_redraw_needed; /* capabilities */ vo->m_capabilities = VO_CAP_COPIES_IMAGE | VO_CAP_YUY2 | VO_CAP_YV12; vo->yuv2rgb_factory = yuv2rgb_factory_init (MODE_16_RGB, vo->yuv2rgb_swap, - vo->yuv2rgb_cmap); + vo->yuv2rgb_cmap); return ( xine_vo_driver_t*) vo; } #if 0 static vo_info_t vo_info_null = { 5, XINE_VISUAL_TYPE_FB }; vo_info_t *get_video_out_plugin_info(){ vo_info_null.description = ("xine video output plugin using null device"); @@ -549,38 +549,38 @@ void null_set_gui_height( xine_vo_driver_t* self, int height ) { void null_set_mode( xine_vo_driver_t* self, int depth, int rgb ) { null_driver_t* this = (null_driver_t*)self; this->bytes_per_pixel = (depth + 7 ) / 8; this->bpp = this->bytes_per_pixel * 8; this->depth = depth; printf("depth %d %d\n", depth, this->bpp); printf("pixeltype %d\n", rgb ); switch ( this->depth ) { case 32: if( rgb == 0 ) this->yuv2rgb_mode = MODE_32_RGB; - else + else this->yuv2rgb_mode = MODE_32_BGR; - case 24: + case 24: if( this->bpp == 32 ) { - if( rgb == 0 ) { - this->yuv2rgb_mode = MODE_32_RGB; - } else { - this->yuv2rgb_mode = MODE_32_BGR; + if( rgb == 0 ) { + this->yuv2rgb_mode = MODE_32_RGB; + } else { + this->yuv2rgb_mode = MODE_32_BGR; } }else{ if( rgb == 0 ) - this->yuv2rgb_mode = MODE_24_RGB; + this->yuv2rgb_mode = MODE_24_RGB; else - this->yuv2rgb_mode = MODE_24_BGR; + this->yuv2rgb_mode = MODE_24_BGR; }; break; case 16: if( rgb == 0 ) { this->yuv2rgb_mode = MODE_16_RGB; } else { this->yuv2rgb_mode = MODE_16_BGR; } break; case 15: if( rgb == 0 ) { this->yuv2rgb_mode = MODE_15_RGB; @@ -589,22 +589,22 @@ void null_set_mode( xine_vo_driver_t* self, int depth, int rgb ) { } break; case 8: if( rgb == 0 ) { this->yuv2rgb_mode = MODE_8_RGB; } else { this->yuv2rgb_mode = MODE_8_BGR; } break; }; //free(this->yuv2rgb_factory ); // this->yuv2rgb_factory = yuv2rgb_factory_init (this->yuv2rgb_mode, this->yuv2rgb_swap, - // this->yuv2rgb_cmap); + // this->yuv2rgb_cmap); }; void null_display_handler( xine_vo_driver_t* self, display_xine_frame_t t, - void* user_data ) { + void* user_data ) { null_driver_t* this = (null_driver_t*) self; this->caller = user_data; this->frameDis = t; } diff --git a/noncore/multimedia/opieplayer2/playlistwidget.cpp b/noncore/multimedia/opieplayer2/playlistwidget.cpp index 040ef71..8e88e9b 100644 --- a/noncore/multimedia/opieplayer2/playlistwidget.cpp +++ b/noncore/multimedia/opieplayer2/playlistwidget.cpp @@ -782,28 +782,25 @@ void PlayListWidget::readm3u( const QString &filename ) { Om3u *m3uList; QString s, name; m3uList = new Om3u( filename, IO_ReadOnly ); m3uList->readM3u(); DocLnk lnk; for ( QStringList::ConstIterator it = m3uList->begin(); it != m3uList->end(); ++it ) { s = *it; // qDebug("reading "+ s); if(s.left(4)=="http") { lnk.setName( s ); //sets file name lnk.setIcon("opieplayer2/musicfile"); -// if(s.right(4) != '.' 
|| s.right(5) != '.') -// lnk.setFile( s+"/"); //if url with no extension -// else - lnk.setFile( s ); //sets file name + lnk.setFile( s ); //sets file name } else { // if( QFileInfo( s ).exists() ) { lnk.setName( QFileInfo(s).baseName()); // if(s.right(4) == '.') {//if regular file if(s.left(1) != "/") { // qDebug("set link "+QFileInfo(filename).dirPath()+"/"+s); lnk.setFile( QFileInfo(filename).dirPath()+"/"+s); // lnk.setIcon(MimeType(s).pixmap() ); // lnk.setIcon("SoundPlayer"); } else { // qDebug("set link2 "+s); |