 noncore/multimedia/opieplayer2/audiowidget.cpp    |   9
 noncore/multimedia/opieplayer2/audiowidget.h      |  20
 noncore/multimedia/opieplayer2/nullvideo.c        | 142
 noncore/multimedia/opieplayer2/playlistwidget.cpp |   5
 4 files changed, 81 insertions(+), 95 deletions(-)
diff --git a/noncore/multimedia/opieplayer2/audiowidget.cpp b/noncore/multimedia/opieplayer2/audiowidget.cpp
index e21b0b1..6ccf206 100644
--- a/noncore/multimedia/opieplayer2/audiowidget.cpp
+++ b/noncore/multimedia/opieplayer2/audiowidget.cpp
@@ -138,28 +138,33 @@ AudioWidget::AudioWidget(QWidget* parent, const char* name, WFlags f) :
 // changeTextColor( &songInfo );
 // songInfo.setBackgroundColor( QColor( 167, 212, 167 ));
 // songInfo.setFrameStyle( QFrame::NoFrame);
 songInfo.setFrameStyle( QFrame::WinPanel | QFrame::Sunken );
 // songInfo.setForegroundColor(Qt::white);

 slider.setFixedHeight( 20 );
 slider.setMinValue( 0 );
 slider.setMaxValue( 1 );
 slider.setFocusPolicy( QWidget::NoFocus );
 slider.setBackgroundPixmap( *pixBg );

+// Config cofg("qpe");
+// cofg.setGroup("Appearance");
+// QColor backgroundcolor = QColor( cofg.readEntry( "Background", "#E5E1D5" ) );
+
 time.setFocusPolicy( QWidget::NoFocus );
 time.setAlignment( Qt::AlignCenter );
-time.setFrame(FALSE);
-changeTextColor( &time );
+
+// time.setFrame(FALSE);
+// changeTextColor( &time );

 resizeEvent( NULL );

 connect( mediaPlayerState, SIGNAL( lengthChanged(long) ), this, SLOT( setLength(long) ) );
 connect( mediaPlayerState, SIGNAL( viewChanged(char) ), this, SLOT( setView(char) ) );
 connect( mediaPlayerState, SIGNAL( loopingToggled(bool) ), this, SLOT( setLooping(bool) ) );
 connect( mediaPlayerState, SIGNAL( playingToggled(bool) ), this, SLOT( setPlaying(bool) ) );
 connect( mediaPlayerState, SIGNAL( isSeekableToggled( bool ) ), this, SLOT( setSeekable( bool ) ) );

 connect( this, SIGNAL( forwardClicked() ), this, SLOT( skipFor() ) );
 connect( this, SIGNAL( backClicked() ), this, SLOT( skipBack() ) );
 connect( this, SIGNAL( forwardReleased() ), this, SLOT( stopSkip() ) );
diff --git a/noncore/multimedia/opieplayer2/audiowidget.h b/noncore/multimedia/opieplayer2/audiowidget.h
index bcd941e..7cb1d79 100644
--- a/noncore/multimedia/opieplayer2/audiowidget.h
+++ b/noncore/multimedia/opieplayer2/audiowidget.h
@@ -34,60 +34,44 @@
 #ifndef AUDIO_WIDGET_H
 #define AUDIO_WIDGET_H

 #include <qwidget.h>
 #include <qpainter.h>
 #include <qdrawutil.h>
 #include <qpixmap.h>
 #include <qstring.h>
 #include <qslider.h>
 #include <qframe.h>
 #include <qlineedit.h>

+#include <opie/oticker.h>

 class QPixmap;

 namespace {

 enum AudioButtons {
 AudioPlay=0,
 AudioStop,
 AudioNext,
 AudioPrevious,
 AudioVolumeUp,
 AudioVolumeDown,
 AudioLoop,
 AudioPlayList,
 AudioForward,
 AudioBack
 };
 };

-class Ticker : public QFrame {
-Q_OBJECT
-
-public:
-Ticker( QWidget* parent=0 );
-~Ticker();
-void setText( const QString& text ) ;
-
-protected:
-void timerEvent( QTimerEvent * );
-void drawContents( QPainter *p );
-private:
-QString scrollText;
-int pos, pixelLen;
-};
-
-
 class AudioWidget : public QWidget {
 Q_OBJECT
 public:
 AudioWidget( QWidget* parent=0, const char* name=0, WFlags f=0 );
 ~AudioWidget();
 void setTickerText( const QString &text ) { songInfo.setText( text ); }
 public slots:
 void updateSlider( long, long );
 void sliderPressed( );
 void sliderReleased( );
 void setLooping( bool b) { setToggleButton( AudioLoop, b ); }
 void setPlaying( bool b) { setToggleButton( AudioPlay, b ); }
@@ -129,22 +113,22 @@ private:
 void paintButton( QPainter *p, int i );
 int skipDirection;
 QString skin;
 QPixmap *pixBg;
 QImage *imgUp;
 QImage *imgDn;
 QImage *imgButtonMask;
 QBitmap *masks[10];
 QPixmap *buttonPixUp[10];
 QPixmap *buttonPixDown[10];

 QPixmap *pixmaps[4];
-Ticker songInfo;
+OTicker songInfo;
 QSlider slider;
 QLineEdit time;
 int xoff, yoff;
 bool isStreaming : 1;
 };


 #endif // AUDIO_WIDGET_H

diff --git a/noncore/multimedia/opieplayer2/nullvideo.c b/noncore/multimedia/opieplayer2/nullvideo.c
index 707efeb..9c285a0 100644
--- a/noncore/multimedia/opieplayer2/nullvideo.c
+++ b/noncore/multimedia/opieplayer2/nullvideo.c
@@ -45,45 +45,45 @@
 #include <pthread.h>
 #include "alphablend.h"
 #include "yuv2rgb.h"

 #define printf(x,...)

 /*
 #define LOG
 */

 /* the caller for our event draw handler */
 typedef void (*display_xine_frame_t) (void *user_data, uint8_t* frame,
 int width, int height,int bytes );

 typedef struct null_driver_s null_driver_t;

 struct null_driver_s {
 xine_vo_driver_t vo_driver;

 uint32_t m_capabilities;
 int m_show_video;
 int m_video_fullscreen;
 int m_is_scaling;

 int depth, bpp, bytes_per_pixel;
 int yuv2rgb_mode;
 int yuv2rgb_swap;
 int yuv2rgb_gamma;
 uint8_t *yuv2rgb_cmap;
 yuv2rgb_factory_t *yuv2rgb_factory;

 vo_overlay_t *overlay;
 vo_scale_t sc;

 int gui_width;
 int gui_height;
 int gui_changed;

 double display_ratio;
 void* caller;
 display_xine_frame_t frameDis;
 };

 typedef struct opie_frame_s opie_frame_t;
 struct opie_frame_s {
@@ -113,30 +113,30 @@ static uint32_t null_get_capabilities( xine_vo_driver_t *self ){
 }

 static void null_frame_copy (vo_frame_t *vo_img, uint8_t **src) {
 opie_frame_t *frame = (opie_frame_t *) vo_img ;

 if (!frame->output->m_show_video) {
 /* printf("nullvideo: no video\n"); */
 return;
 }

 if (frame->format == XINE_IMGFMT_YV12) {
 frame->yuv2rgb->yuv2rgb_fun (frame->yuv2rgb, frame->rgb_dst,
 src[0], src[1], src[2]);
 } else {

 frame->yuv2rgb->yuy22rgb_fun (frame->yuv2rgb, frame->rgb_dst,
 src[0]);
 }

 frame->rgb_dst += frame->stripe_inc;
 }

 static void null_frame_field (vo_frame_t *vo_img, int which_field) {

 opie_frame_t *frame = (opie_frame_t *) vo_img ;

 switch (which_field) {
 case VO_TOP_FIELD:
 frame->rgb_dst = (uint8_t *)frame->data;
 frame->stripe_inc = 2*frame->stripe_height * frame->bytes_per_line;
@@ -187,26 +187,26 @@ static vo_frame_t* null_alloc_frame( xine_vo_driver_t* self ){
 frame->frame.field = null_frame_field;
 frame->frame.dispose = null_frame_dispose;

 /*
 * colorspace converter for this frame
 */
 frame->yuv2rgb = this->yuv2rgb_factory->create_converter (this->yuv2rgb_factory);

 return (vo_frame_t*) frame;
 }

 static void null_update_frame_format( xine_vo_driver_t* self, vo_frame_t* img,
 uint32_t width, uint32_t height,
 int ratio_code, int format, int flags ){
 null_driver_t* this = (null_driver_t*) self;
 opie_frame_t* frame = (opie_frame_t*)img;
 /* not needed now */

 #ifdef LOG
 fprintf (stderr, "nullvideo: update_frame_format\n");
 #endif

 flags &= VO_BOTH_FIELDS;

 /* find out if we need to adapt this frame */

@@ -223,100 +223,100 @@ static void null_update_frame_format( xine_vo_driver_t* self, vo_frame_t* img,
 frame->sc.delivered_height = height;
 frame->sc.delivered_ratio_code = ratio_code;
 frame->flags = flags;
 frame->format = format;
 frame->sc.user_ratio = this->sc.user_ratio;
 frame->sc.gui_width = this->gui_width;
 frame->sc.gui_height = this->gui_height;
 frame->sc.gui_pixel_aspect = 1.0;

 vo_scale_compute_ideal_size ( &frame->sc );
 vo_scale_compute_output_size( &frame->sc );

 #ifdef LOG
 fprintf (stderr, "nullvideo: gui %dx%d delivered %dx%d output %dx%d\n",
 frame->sc.gui_width, frame->sc.gui_height,
 frame->sc.delivered_width, frame->sc.delivered_height,
 frame->sc.output_width, frame->sc.output_height);
 #endif

 /*
 * (re-) allocate
 */
 if( frame->data ) {
 if( frame->chunk[0] ){
 free( frame->chunk[0] );
 frame->chunk[0] = NULL;
 }
 if( frame->chunk[1] ){
 free ( frame->chunk[1] );
 frame->chunk[1] = NULL;
 }
 if( frame->chunk[2] ){
 free ( frame->chunk[2] );
 frame->chunk[2] = NULL;
 }
 free ( frame->data );
 }

 frame->data = xine_xmalloc (frame->sc.output_width
 * frame->sc.output_height
 * this->bytes_per_pixel );

 if( format == XINE_IMGFMT_YV12 ) {
 frame->frame.pitches[0] = 8*((width + 7) / 8);
 frame->frame.pitches[1] = 8*((width + 15) / 16);
 frame->frame.pitches[2] = 8*((width + 15) / 16);
 frame->frame.base[0] = xine_xmalloc_aligned (16, frame->frame.pitches[0] * height,(void **)&frame->chunk[0]);
 frame->frame.base[1] = xine_xmalloc_aligned (16, frame->frame.pitches[1] * ((height+ 1)/2), (void **)&frame->chunk[1]);
 frame->frame.base[2] = xine_xmalloc_aligned (16, frame->frame.pitches[2] * ((height+ 1)/2), (void **)&frame->chunk[2]);

 }else{
 frame->frame.pitches[0] = 8*((width + 3) / 4);

 frame->frame.base[0] = xine_xmalloc_aligned (16, frame->frame.pitches[0] * height,
 (void **)&frame->chunk[0]);
 frame->chunk[1] = NULL;
 frame->chunk[2] = NULL;
 }

 frame->stripe_height = 16 * frame->sc.output_height / frame->sc.delivered_height;
 frame->bytes_per_line = frame->sc.output_width * this->bytes_per_pixel;

 /*
 * set up colorspace converter
 */

 switch (flags) {
 case VO_TOP_FIELD:
 case VO_BOTTOM_FIELD:
 frame->yuv2rgb->configure (frame->yuv2rgb,
 frame->sc.delivered_width,
 16,
 2*frame->frame.pitches[0],
 2*frame->frame.pitches[1],
 frame->sc.output_width,
 frame->stripe_height,
 frame->bytes_per_line*2);
 frame->yuv_stride = frame->bytes_per_line*2;
 break;
 case VO_BOTH_FIELDS:
 frame->yuv2rgb->configure (frame->yuv2rgb,
 frame->sc.delivered_width,
 16,
 frame->frame.pitches[0],
 frame->frame.pitches[1],
 frame->sc.output_width,
 frame->stripe_height,
 frame->bytes_per_line);
 frame->yuv_stride = frame->bytes_per_line;
 break;
 }
 #ifdef LOG
 fprintf (stderr, "nullvideo: colorspace converter configured.\n");
 #endif
 }

 /*
 * reset dest pointers
 */

@@ -339,126 +339,126 @@ static void null_update_frame_format( xine_vo_driver_t* self, vo_frame_t* img,
 }

 static void null_display_frame( xine_vo_driver_t* self, vo_frame_t *frame_gen ){
 null_driver_t* this = (null_driver_t*) self;
 opie_frame_t* frame = (opie_frame_t*)frame_gen;
 display_xine_frame_t display = this->frameDis;

 if (!this->m_show_video)
 return;

 if( display != NULL ) {
 (*display)(this->caller, frame->data,
 frame->sc.output_width, frame->sc.output_height,
 frame->bytes_per_line );
 }

 frame->frame.displayed (&frame->frame);
 }


 /* blending related */
 static void null_overlay_clut_yuv2rgb (null_driver_t *this,
 vo_overlay_t *overlay,
 opie_frame_t *frame) {
 int i;
 clut_t* clut = (clut_t*) overlay->color;
 if (!overlay->rgb_clut) {
 for (i = 0; i < sizeof(overlay->color)/sizeof(overlay->color[0]); i++) {
 *((uint32_t *)&clut[i]) =
 frame->yuv2rgb->yuv2rgb_single_pixel_fun (frame->yuv2rgb,
 clut[i].y, clut[i].cb, clut[i].cr);
 }
 overlay->rgb_clut++;
 }
 if (!overlay->clip_rgb_clut) {
 clut = (clut_t*) overlay->clip_color;
 for (i = 0; i < sizeof(overlay->color)/sizeof(overlay->color[0]); i++) {
 *((uint32_t *)&clut[i]) =
 frame->yuv2rgb->yuv2rgb_single_pixel_fun(frame->yuv2rgb,
 clut[i].y, clut[i].cb, clut[i].cr);
 }
 overlay->clip_rgb_clut++;
 }
 }

 static void null_overlay_blend ( xine_vo_driver_t *this_gen, vo_frame_t *frame_gen, vo_overlay_t *overlay) {
 null_driver_t *this = (null_driver_t *) this_gen;
 opie_frame_t *frame = (opie_frame_t *) frame_gen;

 if(!this->m_show_video || frame->sc.output_width == 0
 || frame->sc.output_height== 0)
 return;

 /* Alpha Blend here */
 if (overlay->rle) {
 if( !overlay->rgb_clut || !overlay->clip_rgb_clut)
 null_overlay_clut_yuv2rgb(this,overlay,frame);

 switch(this->bpp) {
 case 16:
 blend_rgb16( (uint8_t *)frame->data, overlay,
 frame->sc.output_width, frame->sc.output_height,
 frame->sc.delivered_width, frame->sc.delivered_height);
 break;
 case 24:
 blend_rgb24( (uint8_t *)frame->data, overlay,
 frame->sc.output_width, frame->sc.output_height,
 frame->sc.delivered_width, frame->sc.delivered_height);
 break;
 case 32:
 blend_rgb32( (uint8_t *)frame->data, overlay,
 frame->sc.output_width, frame->sc.output_height,
 frame->sc.delivered_width, frame->sc.delivered_height);
 break;
 default:
 /* It should never get here */
 break;
 }
 }
 }


 static int null_get_property( xine_vo_driver_t* self,
 int property ){
 return 0;
 }
 static int null_set_property( xine_vo_driver_t* self,
 int property,
 int value ){
 return value;
 }
 static void null_get_property_min_max( xine_vo_driver_t* self,
 int property, int *min,
 int *max ){
 *max = 0;
 *min = 0;
 }
 static int null_gui_data_exchange( xine_vo_driver_t* self,
 int data_type,
 void *data ){
 return 0;
 }

 static void null_dispose ( xine_vo_driver_t* self ){
 null_driver_t* this = (null_driver_t*)self;
 free ( this );
 }
 static int null_redraw_needed( xine_vo_driver_t* self ){
 return 0;
 }


 xine_vo_driver_t* init_video_out_plugin( config_values_t* conf,
 void* video ){
 null_driver_t *vo;
 vo = (null_driver_t*)malloc( sizeof(null_driver_t ) );

 /* memset? */
 memset(vo,0, sizeof(null_driver_t ) );

 vo_scale_init (&vo->sc, 0, 0);

 vo->sc.gui_pixel_aspect = 1.0;

 vo->m_show_video = 0; // false
 vo->m_video_fullscreen = 0;
@@ -476,25 +476,25 @@ xine_vo_driver_t* init_video_out_plugin( config_values_t* conf,
 vo->vo_driver.overlay_blend = null_overlay_blend;
 vo->vo_driver.get_property = null_get_property;
 vo->vo_driver.set_property = null_set_property;
 vo->vo_driver.get_property_min_max = null_get_property_min_max;
 vo->vo_driver.gui_data_exchange = null_gui_data_exchange;
 vo->vo_driver.dispose = null_dispose;
 vo->vo_driver.redraw_needed = null_redraw_needed;


 /* capabilities */
 vo->m_capabilities = VO_CAP_COPIES_IMAGE | VO_CAP_YUY2 | VO_CAP_YV12;
 vo->yuv2rgb_factory = yuv2rgb_factory_init (MODE_16_RGB, vo->yuv2rgb_swap,
 vo->yuv2rgb_cmap);

 return ( xine_vo_driver_t*) vo;
 }

 #if 0
 static vo_info_t vo_info_null = {
 5,
 XINE_VISUAL_TYPE_FB
 };

 vo_info_t *get_video_out_plugin_info(){
 vo_info_null.description = ("xine video output plugin using null device");
@@ -549,38 +549,38 @@ void null_set_gui_height( xine_vo_driver_t* self, int height ) {
 void null_set_mode( xine_vo_driver_t* self, int depth, int rgb ) {
 null_driver_t* this = (null_driver_t*)self;

 this->bytes_per_pixel = (depth + 7 ) / 8;
 this->bpp = this->bytes_per_pixel * 8;
 this->depth = depth;
 printf("depth %d %d\n", depth, this->bpp);
 printf("pixeltype %d\n", rgb );
 switch ( this->depth ) {
 case 32:
 if( rgb == 0 )
 this->yuv2rgb_mode = MODE_32_RGB;
 else
 this->yuv2rgb_mode = MODE_32_BGR;
 case 24:
 if( this->bpp == 32 ) {
 if( rgb == 0 ) {
 this->yuv2rgb_mode = MODE_32_RGB;
 } else {
 this->yuv2rgb_mode = MODE_32_BGR;
 }
 }else{
 if( rgb == 0 )
 this->yuv2rgb_mode = MODE_24_RGB;
 else
 this->yuv2rgb_mode = MODE_24_BGR;
 };
 break;
 case 16:
 if( rgb == 0 ) {
 this->yuv2rgb_mode = MODE_16_RGB;
 } else {
 this->yuv2rgb_mode = MODE_16_BGR;
 }
 break;
 case 15:
 if( rgb == 0 ) {
 this->yuv2rgb_mode = MODE_15_RGB;
@@ -589,22 +589,22 @@ void null_set_mode( xine_vo_driver_t* self, int depth, int rgb ) {
 }
 break;
 case 8:
 if( rgb == 0 ) {
 this->yuv2rgb_mode = MODE_8_RGB;
 } else {
 this->yuv2rgb_mode = MODE_8_BGR;
 }
 break;
 };
 //free(this->yuv2rgb_factory );
 // this->yuv2rgb_factory = yuv2rgb_factory_init (this->yuv2rgb_mode, this->yuv2rgb_swap,
 // this->yuv2rgb_cmap);
 };

 void null_display_handler( xine_vo_driver_t* self, display_xine_frame_t t,
 void* user_data ) {
 null_driver_t* this = (null_driver_t*) self;
 this->caller = user_data;
 this->frameDis = t;
 }

diff --git a/noncore/multimedia/opieplayer2/playlistwidget.cpp b/noncore/multimedia/opieplayer2/playlistwidget.cpp
index 040ef71..8e88e9b 100644
--- a/noncore/multimedia/opieplayer2/playlistwidget.cpp
+++ b/noncore/multimedia/opieplayer2/playlistwidget.cpp
@@ -782,28 +782,25 @@ void PlayListWidget::readm3u( const QString &filename ) {

 Om3u *m3uList;
 QString s, name;
 m3uList = new Om3u( filename, IO_ReadOnly );
 m3uList->readM3u();
 DocLnk lnk;
 for ( QStringList::ConstIterator it = m3uList->begin(); it != m3uList->end(); ++it ) {
 s = *it;
 // qDebug("reading "+ s);
 if(s.left(4)=="http") {
 lnk.setName( s ); //sets file name
 lnk.setIcon("opieplayer2/musicfile");
-// if(s.right(4) != '.' || s.right(5) != '.')
-// lnk.setFile( s+"/"); //if url with no extension
-// else
-lnk.setFile( s ); //sets file name
+lnk.setFile( s ); //sets file name

 } else {
 // if( QFileInfo( s ).exists() ) {
 lnk.setName( QFileInfo(s).baseName());
 // if(s.right(4) == '.') {//if regular file
 if(s.left(1) != "/") {
 // qDebug("set link "+QFileInfo(filename).dirPath()+"/"+s);
 lnk.setFile( QFileInfo(filename).dirPath()+"/"+s);
 // lnk.setIcon(MimeType(s).pixmap() );
 // lnk.setIcon("SoundPlayer");
 } else {
 // qDebug("set link2 "+s);