author    | zecke <zecke> | 2004-09-23 19:02:47 (UTC)
committer | zecke <zecke> | 2004-09-23 19:02:47 (UTC)
commit    | 9a7e9427062e820f7b654e77e051213c3f53e134 (patch)
tree      | b37f5d9dd37a846551c44feca40c24c56ffc7a05
parent    | 69bf1d25b253167f3d2ef4b162c42aec4d8bbf7a (diff)
-OTicker is in libqtaux, so we need to link against it to avoid weird crashes
-Some function names have changed in xine; update them accordingly
-Start merging video_out_fb.c changes into nullvideo and credit the source
-rw-r--r-- | noncore/multimedia/opieplayer2/lib.cpp             |  15
-rw-r--r-- | noncore/multimedia/opieplayer2/nullvideo.c         | 359
-rw-r--r-- | noncore/multimedia/opieplayer2/opieplayer2.pro     |   2
-rw-r--r-- | noncore/multimedia/opieplayer2/xinevideowidget.cpp |   1
4 files changed, 236 insertions, 141 deletions
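
The xine renames referenced in the commit message show up throughout the nullvideo.c hunks below: the vo_scale helpers and the video port constructor gained an _x_ prefix, the old frame copy hook is now registered as proc_slice, and the plain malloc()/memset() allocation was replaced with xine_xmalloc(), which returns zeroed memory. As a reading aid only (this shim is not part of the commit, and the exact xine release that introduced the prefix is not stated here), the renames amount to:

/* hypothetical compatibility mapping, for reading the diff only */
#define vo_scale_init                  _x_vo_scale_init
#define vo_scale_compute_ideal_size    _x_vo_scale_compute_ideal_size
#define vo_scale_compute_output_size   _x_vo_scale_compute_output_size
#define vo_new_port                    _x_vo_new_port

The commit updates the call sites directly rather than carrying such a shim, and the plugin entry point init_video_out_plugin() is now declared to return xine_video_port_t* instead of xine_vo_driver_t*.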
diff --git a/noncore/multimedia/opieplayer2/lib.cpp b/noncore/multimedia/opieplayer2/lib.cpp index 1ab5c96..248221b 100644 --- a/noncore/multimedia/opieplayer2/lib.cpp +++ b/noncore/multimedia/opieplayer2/lib.cpp | |||
@@ -41,16 +41,17 @@ using namespace Opie::Core; | |||
41 | 41 | ||
42 | /* QT */ | 42 | /* QT */ |
43 | #include <qtextstream.h> | 43 | #include <qtextstream.h> |
44 | #include <qdir.h> | 44 | #include <qdir.h> |
45 | #include <qgfx_qws.h> | 45 | #include <qgfx_qws.h> |
46 | 46 | ||
47 | /* STD */ | 47 | /* STD */ |
48 | #include <assert.h> | 48 | #include <assert.h> |
49 | #include <unistd.h> | ||
49 | 50 | ||
50 | typedef void (*display_xine_frame_t) (void *user_data, uint8_t* frame, | 51 | typedef void (*display_xine_frame_t) (void *user_data, uint8_t* frame, |
51 | int width, int height,int bytes ); | 52 | int width, int height,int bytes ); |
52 | 53 | ||
53 | extern "C" { | 54 | extern "C" { |
54 | xine_vo_driver_t* init_video_out_plugin( xine_t *xine, void* video, display_xine_frame_t, void * ); | 55 | xine_vo_driver_t* init_video_out_plugin( xine_t *xine, void* video, display_xine_frame_t, void * ); |
55 | int null_is_showing_video( const xine_vo_driver_t* self ); | 56 | int null_is_showing_video( const xine_vo_driver_t* self ); |
56 | void null_set_show_video( const xine_vo_driver_t* self, int show ); | 57 | void null_set_show_video( const xine_vo_driver_t* self, int show ); |
@@ -103,20 +104,20 @@ Lib::Lib( InitializationMode initMode, XineVideoWidget* widget ) | |||
103 | m_initialized = true; | 104 | m_initialized = true; |
104 | } | 105 | } |
105 | else | 106 | else |
106 | start(); | 107 | start(); |
107 | } | 108 | } |
108 | 109 | ||
109 | void Lib::run() | 110 | void Lib::run() |
110 | { | 111 | { |
111 | odebug << "Lib::run() started" << oendl; | 112 | odebug << "Lib::run() started" << oendl; |
112 | initialize(); | 113 | initialize(); |
113 | m_initialized = true; | 114 | m_initialized = true; |
114 | odebug << "Lib::run() finished" << oendl; | 115 | odebug << "Lib::run() finished" << oendl; |
115 | } | 116 | } |
116 | 117 | ||
117 | void Lib::initialize() | 118 | void Lib::initialize() |
118 | { | 119 | { |
119 | m_duringInitialization = true; | 120 | m_duringInitialization = true; |
120 | m_xine = xine_new( ); | 121 | m_xine = xine_new( ); |
121 | 122 | ||
122 | QString configPath = QDir::homeDirPath() + "/Settings/opiexine.cf"; | 123 | QString configPath = QDir::homeDirPath() + "/Settings/opiexine.cf"; |
@@ -141,17 +142,17 @@ void Lib::initialize() | |||
141 | printf( "!0\n" ); | 142 | printf( "!0\n" ); |
142 | setWidget( m_wid ); | 143 | setWidget( m_wid ); |
143 | } | 144 | } |
144 | 145 | ||
145 | m_queue = xine_event_new_queue (m_stream); | 146 | m_queue = xine_event_new_queue (m_stream); |
146 | 147 | ||
147 | xine_event_create_listener_thread (m_queue, xine_event_handler, this); | 148 | xine_event_create_listener_thread (m_queue, xine_event_handler, this); |
148 | 149 | ||
149 | ::null_preload_decoders( m_stream ); | 150 | ::null_preload_decoders( m_stream ); |
150 | 151 | ||
151 | m_duringInitialization = false; | 152 | m_duringInitialization = false; |
152 | } | 153 | } |
153 | 154 | ||
154 | Lib::~Lib() { | 155 | Lib::~Lib() { |
155 | assert( isRunning() == false ); | 156 | assert( isRunning() == false ); |
156 | assert( m_initialized ); | 157 | assert( m_initialized ); |
157 | 158 | ||
@@ -212,17 +213,17 @@ int Lib::play( const QString& fileName, int startPos, int start_time ) { | |||
212 | return 0; | 213 | return 0; |
213 | } | 214 | } |
214 | return xine_play( m_stream, startPos, start_time); | 215 | return xine_play( m_stream, startPos, start_time); |
215 | } | 216 | } |
216 | 217 | ||
217 | void Lib::stop() { | 218 | void Lib::stop() { |
218 | assert( m_initialized ); | 219 | assert( m_initialized ); |
219 | 220 | ||
220 | odebug << "<<<<<<<< STOP IN LIB TRIGGERED >>>>>>>" << oendl; | 221 | odebug << "<<<<<<<< STOP IN LIB TRIGGERED >>>>>>>" << oendl; |
221 | xine_stop( m_stream ); | 222 | xine_stop( m_stream ); |
222 | } | 223 | } |
223 | 224 | ||
224 | void Lib::pause( bool toggle ) { | 225 | void Lib::pause( bool toggle ) { |
225 | assert( m_initialized ); | 226 | assert( m_initialized ); |
226 | 227 | ||
227 | xine_set_param( m_stream, XINE_PARAM_SPEED, toggle ? XINE_SPEED_PAUSE : XINE_SPEED_NORMAL ); | 228 | xine_set_param( m_stream, XINE_PARAM_SPEED, toggle ? XINE_SPEED_PAUSE : XINE_SPEED_NORMAL ); |
228 | } | 229 | } |
@@ -324,19 +325,19 @@ int Lib::error() const { | |||
324 | return xine_get_error( m_stream ); | 325 | return xine_get_error( m_stream ); |
325 | }; | 326 | }; |
326 | 327 | ||
327 | void Lib::ensureInitialized() | 328 | void Lib::ensureInitialized() |
328 | { | 329 | { |
329 | if ( m_initialized ) | 330 | if ( m_initialized ) |
330 | return; | 331 | return; |
331 | 332 | ||
332 | odebug << "waiting for initialization thread to finish" << oendl; | 333 | odebug << "waiting for initialization thread to finish" << oendl; |
333 | wait(); | 334 | wait(); |
334 | odebug << "initialization thread finished!" << oendl; | 335 | odebug << "initialization thread finished!" << oendl; |
335 | } | 336 | } |
336 | 337 | ||
337 | void Lib::setWidget( XineVideoWidget *widget ) | 338 | void Lib::setWidget( XineVideoWidget *widget ) |
338 | { | 339 | { |
339 | m_wid = widget; | 340 | m_wid = widget; |
340 | resize ( m_wid-> size ( ) ); | 341 | resize ( m_wid-> size ( ) ); |
341 | ::null_set_mode( m_videoOutput, qt_screen->depth(), qt_screen->pixelType() ); | 342 | ::null_set_mode( m_videoOutput, qt_screen->depth(), qt_screen->pixelType() ); |
342 | m_wid->repaint(); | 343 | m_wid->repaint(); |
@@ -419,16 +420,16 @@ void Lib::xine_display_frame( void* user_data, uint8_t *frame, | |||
419 | int width, int height, int bytes ) { | 420 | int width, int height, int bytes ) { |
420 | ( (Lib*)user_data)->drawFrame( frame, width, height, bytes ); | 421 | ( (Lib*)user_data)->drawFrame( frame, width, height, bytes ); |
421 | } | 422 | } |
422 | 423 | ||
423 | void Lib::drawFrame( uint8_t* frame, int width, int height, int bytes ) { | 424 | void Lib::drawFrame( uint8_t* frame, int width, int height, int bytes ) { |
424 | assert( m_initialized ); | 425 | assert( m_initialized ); |
425 | 426 | ||
426 | if ( !m_video ) { | 427 | if ( !m_video ) { |
427 | owarn << "not showing video now" << oendl; | 428 | owarn << "not showing video now" << oendl; |
428 | return; | 429 | return; |
429 | } | 430 | } |
430 | 431 | ||
431 | assert( m_wid ); | 432 | assert( m_wid ); |
432 | 433 | ||
433 | m_wid-> setVideoFrame ( frame, width, height, bytes ); | 434 | m_wid-> setVideoFrame ( frame, width, height, bytes ); |
434 | } | 435 | } |
diff --git a/noncore/multimedia/opieplayer2/nullvideo.c b/noncore/multimedia/opieplayer2/nullvideo.c index 378bbd4..6769a37 100644 --- a/noncore/multimedia/opieplayer2/nullvideo.c +++ b/noncore/multimedia/opieplayer2/nullvideo.c | |||
@@ -1,14 +1,15 @@ | |||
1 | /* | 1 | /* |
2 | This file is part of the Opie Project | 2 | This file is part of the Opie Project |
3 | 3 | ||
4 | Copyright (c) 2002 Max Reiss <harlekin@handhelds.org> | 4 | Copyright (c) 2002 Max Reiss <harlekin@handhelds.org> |
5 | Copyright (c) 2002 LJP <> | 5 | Copyright (c) 2002 LJP <> |
6 | Copyright (c) 2002 Holger Freyther <zecke@handhelds.org> | 6 | Copyright (c) 2002 Holger Freyther <zecke@handhelds.org> |
7 | Copyright (c) 2002-2003 Miguel Freitas of xine | ||
7 | =. | 8 | =. |
8 | .=l. | 9 | .=l. |
9 | .>+-= | 10 | .>+-= |
10 | _;:, .> :=|. This program is free software; you can | 11 | _;:, .> :=|. This program is free software; you can |
11 | .> <`_, > . <= redistribute it and/or modify it under | 12 | .> <`_, > . <= redistribute it and/or modify it under |
12 | :`=1 )Y*s>-.-- : the terms of the GNU General Public | 13 | :`=1 )Y*s>-.-- : the terms of the GNU General Public |
13 | .="- .-=="i, .._ License as published by the Free Software | 14 | .="- .-=="i, .._ License as published by the Free Software |
14 | - . .-<_> .<> Foundation; either version 2 of the License, | 15 | - . .-<_> .<> Foundation; either version 2 of the License, |
@@ -108,17 +109,17 @@ struct opie_frame_s { | |||
108 | null_driver_t *output; | 109 | null_driver_t *output; |
109 | }; | 110 | }; |
110 | 111 | ||
111 | static uint32_t null_get_capabilities( vo_driver_t *self ){ | 112 | static uint32_t null_get_capabilities( vo_driver_t *self ){ |
112 | null_driver_t* this = (null_driver_t*)self; | 113 | null_driver_t* this = (null_driver_t*)self; |
113 | return this->m_capabilities; | 114 | return this->m_capabilities; |
114 | } | 115 | } |
115 | 116 | ||
116 | static void null_frame_copy (vo_frame_t *vo_img, uint8_t **src) { | 117 | static void null_frame_proc_slice (vo_frame_t *vo_img, uint8_t **src) { |
117 | opie_frame_t *frame = (opie_frame_t *) vo_img ; | 118 | opie_frame_t *frame = (opie_frame_t *) vo_img ; |
118 | 119 | ||
119 | vo_img->proc_called = 1; | 120 | vo_img->proc_called = 1; |
120 | 121 | ||
121 | if (!frame->output->m_show_video) { | 122 | if (!frame->output->m_show_video) { |
122 | /* printf("nullvideo: no video\n"); */ | 123 | /* printf("nullvideo: no video\n"); */ |
123 | return; | 124 | return; |
124 | } | 125 | } |
@@ -170,84 +171,66 @@ static vo_frame_t* null_alloc_frame( vo_driver_t* self ){ | |||
170 | 171 | ||
171 | null_driver_t* this = (null_driver_t*)self; | 172 | null_driver_t* this = (null_driver_t*)self; |
172 | opie_frame_t* frame; | 173 | opie_frame_t* frame; |
173 | 174 | ||
174 | #ifdef LOG | 175 | #ifdef LOG |
175 | fprintf (stderr, "nullvideo: alloc_frame\n"); | 176 | fprintf (stderr, "nullvideo: alloc_frame\n"); |
176 | #endif | 177 | #endif |
177 | 178 | ||
178 | frame = (opie_frame_t*)malloc ( sizeof(opie_frame_t) ); | 179 | frame = (opie_frame_t*)xine_xmalloc ( sizeof(opie_frame_t) ); |
179 | 180 | ||
180 | memset( frame, 0, sizeof( opie_frame_t) ); | ||
181 | memcpy (&frame->sc, &this->sc, sizeof(vo_scale_t)); | 181 | memcpy (&frame->sc, &this->sc, sizeof(vo_scale_t)); |
182 | 182 | ||
183 | pthread_mutex_init (&frame->frame.mutex, NULL); | 183 | pthread_mutex_init (&frame->frame.mutex, NULL); |
184 | 184 | ||
185 | frame->output = this; | 185 | frame->output = this; |
186 | 186 | ||
187 | /* initialize the frame*/ | 187 | /* initialize the frame*/ |
188 | frame->frame.driver = self; | 188 | frame->frame.driver = self; |
189 | frame->frame.proc_slice = null_frame_copy; | 189 | frame->frame.proc_slice = null_frame_proc_slice; |
190 | frame->frame.field = null_frame_field; | 190 | frame->frame.field = null_frame_field; |
191 | frame->frame.dispose = null_frame_dispose; | 191 | frame->frame.dispose = null_frame_dispose; |
192 | 192 | ||
193 | /* | 193 | /* |
194 | * colorspace converter for this frame | 194 | * colorspace converter for this frame |
195 | */ | 195 | */ |
196 | frame->yuv2rgb = this->yuv2rgb_factory->create_converter (this->yuv2rgb_factory); | 196 | frame->yuv2rgb = this->yuv2rgb_factory->create_converter (this->yuv2rgb_factory); |
197 | 197 | ||
198 | return (vo_frame_t*) frame; | 198 | return (vo_frame_t*) frame; |
199 | } | 199 | } |
200 | 200 | ||
201 | static void null_update_frame_format( vo_driver_t* self, vo_frame_t* img, | ||
202 | uint32_t width, uint32_t height, | ||
203 | double ratio_code, int format, | ||
204 | int flags ){ | ||
205 | null_driver_t* this = (null_driver_t*) self; | ||
206 | opie_frame_t* frame = (opie_frame_t*)img; | ||
207 | /* not needed now */ | ||
208 | 201 | ||
209 | #ifdef LOG | 202 | static void null_frame_compute_ideal_size( null_driver_t *this, |
210 | fprintf (stderr, "nullvideo: update_frame_format\n"); | 203 | opie_frame_t *frame ) { |
211 | #endif | 204 | this = this; |
212 | 205 | ||
213 | flags &= VO_BOTH_FIELDS; | 206 | _x_vo_scale_compute_ideal_size(&frame->sc); |
207 | } | ||
214 | 208 | ||
215 | /* find out if we need to adapt this frame */ | 209 | static void null_frame_compute_rgb_size( null_driver_t *this, |
210 | opie_frame_t *frame ){ | ||
211 | this = this; | ||
216 | 212 | ||
217 | if ((width != frame->sc.delivered_width) | 213 | _x_vo_scale_compute_output_size(&frame->sc); |
218 | || (height != frame->sc.delivered_height) | ||
219 | || (ratio_code != frame->sc.delivered_ratio) | ||
220 | || (flags != frame->flags) | ||
221 | || (format != frame->format) | ||
222 | || (this->sc.user_ratio != frame->sc.user_ratio) | ||
223 | || (this->gui_width != frame->sc.gui_width) | ||
224 | || (this->gui_height != frame->sc.gui_height)) { | ||
225 | 214 | ||
226 | frame->sc.delivered_width = width; | 215 | /* avoid problems in yuv2rgb */ |
227 | frame->sc.delivered_height = height; | 216 | if(frame->sc.output_height < (frame->sc.delivered_height+15) >> 4) |
228 | frame->sc.delivered_ratio = ratio_code; | 217 | frame->sc.output_height = (frame->sc.delivered_height+15) >> 4; |
229 | frame->flags = flags; | ||
230 | frame->format = format; | ||
231 | frame->sc.user_ratio = this->sc.user_ratio; | ||
232 | frame->sc.gui_width = this->gui_width; | ||
233 | frame->sc.gui_height = this->gui_height; | ||
234 | frame->sc.gui_pixel_aspect = 1.0; | ||
235 | 218 | ||
236 | vo_scale_compute_ideal_size ( &frame->sc ); | 219 | if (frame->sc.output_width < 8) |
237 | vo_scale_compute_output_size( &frame->sc ); | 220 | frame->sc.output_width = 8; |
238 | 221 | ||
239 | #ifdef LOG | 222 | /* yuv2rgb_mlib needs an even YUV2 width */ |
240 | fprintf (stderr, "nullvideo: gui %dx%d delivered %dx%d output %dx%d\n", | 223 | if (frame->sc.output_width & 1) |
241 | frame->sc.gui_width, frame->sc.gui_height, | 224 | frame->sc.output_width++; |
242 | frame->sc.delivered_width, frame->sc.delivered_height, | 225 | } |
243 | frame->sc.output_width, frame->sc.output_height); | ||
244 | #endif | ||
245 | 226 | ||
227 | static void null_frame_reallocate( null_driver_t *this, opie_frame_t *frame, | ||
228 | uint32_t width, uint32_t height, int format){ | ||
246 | /* | 229 | /* |
247 | * (re-) allocate | 230 | * (re-) allocate |
248 | */ | 231 | */ |
249 | if( frame->data ) { | 232 | if( frame->data ) { |
250 | if( frame->chunk[0] ){ | 233 | if( frame->chunk[0] ){ |
251 | free( frame->chunk[0] ); | 234 | free( frame->chunk[0] ); |
252 | frame->chunk[0] = NULL; | 235 | frame->chunk[0] = NULL; |
253 | } | 236 | } |
@@ -257,17 +240,17 @@ static void null_update_frame_format( vo_driver_t* self, vo_frame_t* img, | |||
257 | } | 240 | } |
258 | if( frame->chunk[2] ){ | 241 | if( frame->chunk[2] ){ |
259 | free ( frame->chunk[2] ); | 242 | free ( frame->chunk[2] ); |
260 | frame->chunk[2] = NULL; | 243 | frame->chunk[2] = NULL; |
261 | } | 244 | } |
262 | free ( frame->data ); | 245 | free ( frame->data ); |
263 | } | 246 | } |
264 | 247 | ||
265 | frame->data = xine_xmalloc (frame->sc.output_width | 248 | frame->data = xine_xmalloc (frame->sc.output_width |
266 | * frame->sc.output_height | 249 | * frame->sc.output_height |
267 | * this->bytes_per_pixel ); | 250 | * this->bytes_per_pixel ); |
268 | 251 | ||
269 | if( format == XINE_IMGFMT_YV12 ) { | 252 | if( format == XINE_IMGFMT_YV12 ) { |
270 | frame->frame.pitches[0] = 8*((width + 7) / 8); | 253 | frame->frame.pitches[0] = 8*((width + 7) / 8); |
271 | frame->frame.pitches[1] = 8*((width + 15) / 16); | 254 | frame->frame.pitches[1] = 8*((width + 15) / 16); |
272 | frame->frame.pitches[2] = 8*((width + 15) / 16); | 255 | frame->frame.pitches[2] = 8*((width + 15) / 16); |
273 | frame->frame.base[0] = xine_xmalloc_aligned (16, frame->frame.pitches[0] * height,(void **)&frame->chunk[0]); | 256 | frame->frame.base[0] = xine_xmalloc_aligned (16, frame->frame.pitches[0] * height,(void **)&frame->chunk[0]); |
@@ -278,73 +261,126 @@ static void null_update_frame_format( vo_driver_t* self, vo_frame_t* img, | |||
278 | frame->frame.pitches[0] = 8*((width + 3) / 4); | 261 | frame->frame.pitches[0] = 8*((width + 3) / 4); |
279 | 262 | ||
280 | frame->frame.base[0] = xine_xmalloc_aligned (16, frame->frame.pitches[0] * height, | 263 | frame->frame.base[0] = xine_xmalloc_aligned (16, frame->frame.pitches[0] * height, |
281 | (void **)&frame->chunk[0]); | 264 | (void **)&frame->chunk[0]); |
282 | frame->chunk[1] = NULL; | 265 | frame->chunk[1] = NULL; |
283 | frame->chunk[2] = NULL; | 266 | frame->chunk[2] = NULL; |
284 | } | 267 | } |
285 | 268 | ||
269 | } | ||
270 | |||
271 | static void null_setup_colorspace_converter(opie_frame_t *frame, int flags ) { | ||
272 | switch (flags) { | ||
273 | case VO_TOP_FIELD: | ||
274 | case VO_BOTTOM_FIELD: | ||
275 | frame->yuv2rgb->configure (frame->yuv2rgb, | ||
276 | frame->sc.delivered_width, | ||
277 | 16, | ||
278 | 2*frame->frame.pitches[0], | ||
279 | 2*frame->frame.pitches[1], | ||
280 | frame->sc.output_width, | ||
281 | frame->stripe_height, | ||
282 | frame->bytes_per_line*2); | ||
283 | frame->yuv_stride = frame->bytes_per_line*2; | ||
284 | break; | ||
285 | case VO_BOTH_FIELDS: | ||
286 | frame->yuv2rgb->configure (frame->yuv2rgb, | ||
287 | frame->sc.delivered_width, | ||
288 | 16, | ||
289 | frame->frame.pitches[0], | ||
290 | frame->frame.pitches[1], | ||
291 | frame->sc.output_width, | ||
292 | frame->stripe_height, | ||
293 | frame->bytes_per_line); | ||
294 | frame->yuv_stride = frame->bytes_per_line; | ||
295 | break; | ||
296 | } | ||
297 | #ifdef LOG | ||
298 | fprintf (stderr, "nullvideo: colorspace converter configured.\n"); | ||
299 | #endif | ||
300 | } | ||
301 | |||
302 | static void null_update_frame_format( vo_driver_t* self, vo_frame_t* img, | ||
303 | uint32_t width, uint32_t height, | ||
304 | double ratio_code, int format, | ||
305 | int flags ){ | ||
306 | null_driver_t* this = (null_driver_t*) self; | ||
307 | opie_frame_t* frame = (opie_frame_t*)img; | ||
308 | |||
309 | #ifdef LOG | ||
310 | fprintf (stderr, "nullvideo: update_frame_format\n"); | ||
311 | #endif | ||
312 | |||
313 | flags &= VO_BOTH_FIELDS; | ||
314 | |||
315 | /* find out if we need to adapt this frame */ | ||
316 | |||
317 | if ((width != frame->sc.delivered_width) | ||
318 | || (height != frame->sc.delivered_height) | ||
319 | || (ratio_code != frame->sc.delivered_ratio) | ||
320 | || (flags != frame->flags) | ||
321 | || (format != frame->format) | ||
322 | || (this->sc.user_ratio != frame->sc.user_ratio) | ||
323 | || (this->gui_width != frame->sc.gui_width) | ||
324 | || (this->gui_height != frame->sc.gui_height)) { | ||
325 | |||
326 | frame->sc.delivered_width = width; | ||
327 | frame->sc.delivered_height = height; | ||
328 | frame->sc.delivered_ratio = ratio_code; | ||
329 | frame->flags = flags; | ||
330 | frame->format = format; | ||
331 | frame->sc.user_ratio = this->sc.user_ratio; | ||
332 | frame->sc.gui_width = this->gui_width; | ||
333 | frame->sc.gui_height = this->gui_height; | ||
334 | frame->sc.gui_pixel_aspect = 1.0; | ||
335 | |||
336 | |||
337 | null_frame_compute_ideal_size(this, frame); | ||
338 | null_frame_compute_rgb_size(this, frame); | ||
339 | null_frame_reallocate(this, frame, width, height, format); | ||
340 | |||
341 | #ifdef LOG | ||
342 | fprintf (stderr, "nullvideo: gui %dx%d delivered %dx%d output %dx%d\n", | ||
343 | frame->sc.gui_width, frame->sc.gui_height, | ||
344 | frame->sc.delivered_width, frame->sc.delivered_height, | ||
345 | frame->sc.output_width, frame->sc.output_height); | ||
346 | #endif | ||
347 | |||
348 | |||
349 | |||
286 | frame->stripe_height = 16 * frame->sc.output_height / frame->sc.delivered_height; | 350 | frame->stripe_height = 16 * frame->sc.output_height / frame->sc.delivered_height; |
287 | frame->bytes_per_line = frame->sc.output_width * this->bytes_per_pixel; | 351 | frame->bytes_per_line = frame->sc.output_width * this->bytes_per_pixel; |
288 | 352 | ||
289 | /* | 353 | /* |
290 | * set up colorspace converter | 354 | * set up colorspace converter |
291 | */ | 355 | */ |
356 | null_setup_colorspace_converter(frame, flags); | ||
292 | 357 | ||
293 | switch (flags) { | ||
294 | case VO_TOP_FIELD: | ||
295 | case VO_BOTTOM_FIELD: | ||
296 | frame->yuv2rgb->configure (frame->yuv2rgb, | ||
297 | frame->sc.delivered_width, | ||
298 | 16, | ||
299 | 2*frame->frame.pitches[0], | ||
300 | 2*frame->frame.pitches[1], | ||
301 | frame->sc.output_width, | ||
302 | frame->stripe_height, | ||
303 | frame->bytes_per_line*2); | ||
304 | frame->yuv_stride = frame->bytes_per_line*2; | ||
305 | break; | ||
306 | case VO_BOTH_FIELDS: | ||
307 | frame->yuv2rgb->configure (frame->yuv2rgb, | ||
308 | frame->sc.delivered_width, | ||
309 | 16, | ||
310 | frame->frame.pitches[0], | ||
311 | frame->frame.pitches[1], | ||
312 | frame->sc.output_width, | ||
313 | frame->stripe_height, | ||
314 | frame->bytes_per_line); | ||
315 | frame->yuv_stride = frame->bytes_per_line; | ||
316 | break; | ||
317 | } | ||
318 | #ifdef LOG | ||
319 | fprintf (stderr, "nullvideo: colorspace converter configured.\n"); | ||
320 | #endif | ||
321 | } | 358 | } |
322 | |||
323 | /* | 359 | /* |
324 | * reset dest pointers | 360 | * reset dest pointers |
325 | */ | 361 | */ |
326 | 362 | ||
327 | if (frame->data) { | 363 | if (frame->data) { |
328 | switch (flags) { | 364 | switch (flags) { |
329 | case VO_TOP_FIELD: | 365 | case VO_TOP_FIELD: |
330 | frame->rgb_dst = (uint8_t *)frame->data; | 366 | frame->rgb_dst = (uint8_t *)frame->data; |
331 | frame->stripe_inc = 2 * frame->stripe_height * frame->bytes_per_line; | 367 | frame->stripe_inc = 2 * frame->stripe_height * frame->bytes_per_line; |
332 | break; | 368 | break; |
333 | case VO_BOTTOM_FIELD: | 369 | case VO_BOTTOM_FIELD: |
334 | frame->rgb_dst = (uint8_t *)frame->data + frame->bytes_per_line ; | 370 | frame->rgb_dst = (uint8_t *)frame->data + frame->bytes_per_line ; |
335 | frame->stripe_inc = 2 * frame->stripe_height * frame->bytes_per_line; | 371 | frame->stripe_inc = 2 * frame->stripe_height * frame->bytes_per_line; |
336 | break; | 372 | break; |
337 | case VO_BOTH_FIELDS: | 373 | case VO_BOTH_FIELDS: |
338 | frame->rgb_dst = (uint8_t *)frame->data; | 374 | frame->rgb_dst = (uint8_t *)frame->data; |
339 | frame->stripe_inc = frame->stripe_height * frame->bytes_per_line; | 375 | frame->stripe_inc = frame->stripe_height * frame->bytes_per_line; |
340 | break; | 376 | break; |
377 | } | ||
341 | } | 378 | } |
342 | } | ||
343 | } | 379 | } |
344 | 380 | ||
345 | static void null_display_frame( vo_driver_t* self, vo_frame_t *frame_gen ){ | 381 | static void null_display_frame( vo_driver_t* self, vo_frame_t *frame_gen ){ |
346 | null_driver_t* this = (null_driver_t*) self; | 382 | null_driver_t* this = (null_driver_t*) self; |
347 | opie_frame_t* frame = (opie_frame_t*)frame_gen; | 383 | opie_frame_t* frame = (opie_frame_t*)frame_gen; |
348 | display_xine_frame_t display = this->frameDis; | 384 | display_xine_frame_t display = this->frameDis; |
349 | 385 | ||
350 | if (!this->m_show_video) | 386 | if (!this->m_show_video) |
@@ -359,24 +395,28 @@ static void null_display_frame( vo_driver_t* self, vo_frame_t *frame_gen ){ | |||
359 | frame->frame.free(&frame->frame); | 395 | frame->frame.free(&frame->frame); |
360 | } | 396 | } |
361 | 397 | ||
362 | 398 | ||
363 | /* blending related */ | 399 | /* blending related */ |
364 | static void null_overlay_clut_yuv2rgb (null_driver_t *this, | 400 | static void null_overlay_clut_yuv2rgb (null_driver_t *this, |
365 | vo_overlay_t *overlay, | 401 | vo_overlay_t *overlay, |
366 | opie_frame_t *frame) { | 402 | opie_frame_t *frame) { |
403 | this = this; | ||
404 | |||
405 | |||
367 | int i; | 406 | int i; |
368 | clut_t* clut = (clut_t*) overlay->color; | 407 | clut_t* clut = (clut_t*) overlay->color; |
369 | if (!overlay->rgb_clut) { | 408 | if (!overlay->rgb_clut) { |
370 | for (i = 0; i < sizeof(overlay->color)/sizeof(overlay->color[0]); i++) { | 409 | for (i = 0; i < sizeof(overlay->color)/sizeof(overlay->color[0]); i++) { |
371 | *((uint32_t *)&clut[i]) = | 410 | *((uint32_t *)&clut[i]) = |
372 | frame->yuv2rgb->yuv2rgb_single_pixel_fun (frame->yuv2rgb, | 411 | frame->yuv2rgb-> |
373 | clut[i].y, clut[i].cb, | 412 | yuv2rgb_single_pixel_fun (frame->yuv2rgb, |
374 | clut[i].cr); | 413 | clut[i].y, clut[i].cb, |
414 | clut[i].cr); | ||
375 | } | 415 | } |
376 | overlay->rgb_clut++; | 416 | overlay->rgb_clut++; |
377 | } | 417 | } |
378 | if (!overlay->clip_rgb_clut) { | 418 | if (!overlay->clip_rgb_clut) { |
379 | clut = (clut_t*) overlay->clip_color; | 419 | clut = (clut_t*) overlay->clip_color; |
380 | for (i = 0; i < sizeof(overlay->color)/sizeof(overlay->color[0]); i++) { | 420 | for (i = 0; i < sizeof(overlay->color)/sizeof(overlay->color[0]); i++) { |
381 | *((uint32_t *)&clut[i]) = | 421 | *((uint32_t *)&clut[i]) = |
382 | frame->yuv2rgb->yuv2rgb_single_pixel_fun(frame->yuv2rgb, | 422 | frame->yuv2rgb->yuv2rgb_single_pixel_fun(frame->yuv2rgb, |
@@ -396,80 +436,143 @@ static void null_overlay_blend ( vo_driver_t *this_gen, vo_frame_t *frame_gen, | |||
396 | return; | 436 | return; |
397 | 437 | ||
398 | /* Alpha Blend here */ | 438 | /* Alpha Blend here */ |
399 | if (overlay->rle) { | 439 | if (overlay->rle) { |
400 | if( !overlay->rgb_clut || !overlay->clip_rgb_clut) | 440 | if( !overlay->rgb_clut || !overlay->clip_rgb_clut) |
401 | null_overlay_clut_yuv2rgb(this,overlay,frame); | 441 | null_overlay_clut_yuv2rgb(this,overlay,frame); |
402 | 442 | ||
403 | switch(this->bpp) { | 443 | switch(this->bpp) { |
404 | case 16: | 444 | case 16: |
405 | blend_rgb16( (uint8_t *)frame->data, overlay, | 445 | blend_rgb16((uint8_t *)frame->data, |
406 | frame->sc.output_width, frame->sc.output_height, | 446 | overlay, |
407 | frame->sc.delivered_width, frame->sc.delivered_height); | 447 | frame->sc.output_width, |
408 | break; | 448 | frame->sc.output_height, |
409 | case 24: | 449 | frame->sc.delivered_width, |
410 | blend_rgb24( (uint8_t *)frame->data, overlay, | 450 | frame->sc.delivered_height); |
411 | frame->sc.output_width, frame->sc.output_height, | 451 | break; |
412 | frame->sc.delivered_width, frame->sc.delivered_height); | 452 | case 24: |
413 | break; | 453 | blend_rgb24((uint8_t *)frame->data, |
414 | case 32: | 454 | overlay, |
415 | blend_rgb32( (uint8_t *)frame->data, overlay, | 455 | frame->sc.output_width, |
416 | frame->sc.output_width, frame->sc.output_height, | 456 | frame->sc.output_height, |
417 | frame->sc.delivered_width, frame->sc.delivered_height); | 457 | frame->sc.delivered_width, |
418 | break; | 458 | frame->sc.delivered_height); |
419 | default: | 459 | break; |
420 | /* It should never get here */ | 460 | case 32: |
421 | break; | 461 | blend_rgb32((uint8_t *)frame->data, |
462 | overlay, | ||
463 | frame->sc.output_width, | ||
464 | frame->sc.output_height, | ||
465 | frame->sc.delivered_width, | ||
466 | frame->sc.delivered_height); | ||
467 | break; | ||
468 | default: | ||
469 | /* It should never get here */ | ||
470 | break; | ||
422 | } | 471 | } |
423 | } | 472 | } |
424 | } | 473 | } |
425 | 474 | ||
426 | 475 | ||
427 | static int null_get_property( vo_driver_t* self, | 476 | static int null_get_property( vo_driver_t* self, int property ){ |
428 | int property ){ | 477 | #if 0 |
478 | null_driver_t *this = (null_driver_t *)self; | ||
479 | |||
480 | switch(property) | ||
481 | { | ||
482 | case VO_PROP_ASPECT_RATIO: | ||
483 | return this->sc.user_ratio; | ||
484 | case VO_PROP_BRIGHTNESS: | ||
485 | return this->yuv2rgb_brightness; | ||
486 | case VO_PROP_WINDOW_WIDTH: | ||
487 | return this->sc.gui_width; | ||
488 | case VO_PROP_WINDOW_HEIGHT: | ||
489 | return this->sc.gui_height; | ||
490 | default: | ||
491 | break; | ||
492 | } | ||
493 | #else | ||
494 | property = property; | ||
495 | self = self; | ||
496 | #endif | ||
497 | |||
429 | return 0; | 498 | return 0; |
430 | } | 499 | } |
431 | static int null_set_property( vo_driver_t* self, | 500 | static int null_set_property( vo_driver_t* self, int property, |
432 | int property, | ||
433 | int value ){ | 501 | int value ){ |
502 | #if 0 | ||
503 | null_driver_t *this = (null_driver_t *)self; | ||
504 | |||
505 | switch(property) | ||
506 | { | ||
507 | case VO_PROP_ASPECT_RATIO: | ||
508 | if(value>=XINE_VO_ASPECT_NUM_RATIOS) | ||
509 | value = XINE_VO_ASPECT_AUTO; | ||
510 | this->sc.user_ratio = value; | ||
511 | break; | ||
512 | case VO_PROP_BRIGHTNESS: | ||
513 | this->yuv2rgb_brightness = value; | ||
514 | this->yuv2rgb_factory-> | ||
515 | set_csc_levels(this->yuv2rgb_factory, value, 128, 128); | ||
516 | break; | ||
517 | default: | ||
518 | break; | ||
519 | } | ||
520 | #else | ||
521 | self = self; | ||
522 | property = property; | ||
523 | #endif | ||
524 | |||
434 | return value; | 525 | return value; |
435 | } | 526 | } |
436 | static void null_get_property_min_max( vo_driver_t* self, | 527 | static void null_get_property_min_max( vo_driver_t* self, |
437 | int property, int *min, | 528 | int property, int *min, |
438 | int *max ){ | 529 | int *max ){ |
530 | self = self; | ||
531 | property = property; | ||
532 | |||
439 | *max = 0; | 533 | *max = 0; |
440 | *min = 0; | 534 | *min = 0; |
441 | } | 535 | } |
442 | static int null_gui_data_exchange( vo_driver_t* self, | 536 | static int null_gui_data_exchange( vo_driver_t* self, |
443 | int data_type, | 537 | int data_type, |
444 | void *data ){ | 538 | void *data ){ |
539 | self = self; | ||
540 | data_type = data_type; | ||
541 | data = data; | ||
542 | |||
445 | return 0; | 543 | return 0; |
446 | } | 544 | } |
447 | 545 | ||
448 | static void null_dispose ( vo_driver_t* self ){ | 546 | static void null_dispose ( vo_driver_t* self ){ |
449 | null_driver_t* this = (null_driver_t*)self; | 547 | null_driver_t* this = (null_driver_t*)self; |
450 | free ( this ); | 548 | free ( this ); |
451 | } | 549 | } |
452 | static int null_redraw_needed( vo_driver_t* self ){ | 550 | static int null_redraw_needed( vo_driver_t* self ){ |
453 | return 0; | 551 | self = self; |
552 | |||
553 | return 0; | ||
454 | } | 554 | } |
455 | 555 | ||
456 | 556 | ||
457 | xine_vo_driver_t* init_video_out_plugin( xine_t *xine, | 557 | xine_video_port_t* init_video_out_plugin( xine_t *xine, |
458 | void* video, | 558 | void* video, |
459 | display_xine_frame_t frameDisplayFunc, | 559 | display_xine_frame_t frameDisplayFunc, |
460 | void *userData ){ | 560 | void *userData ){ |
561 | video = video; | ||
562 | |||
563 | |||
461 | null_driver_t *vo; | 564 | null_driver_t *vo; |
462 | vo = (null_driver_t*)malloc( sizeof(null_driver_t ) ); | 565 | vo = (null_driver_t*)malloc( sizeof(null_driver_t ) ); |
463 | 566 | ||
464 | /* memset? */ | 567 | /* memset? */ |
465 | memset(vo,0, sizeof(null_driver_t ) ); | 568 | memset(vo,0, sizeof(null_driver_t ) ); |
466 | 569 | ||
467 | vo_scale_init (&vo->sc, 0, 0, xine->config); | 570 | _x_vo_scale_init (&vo->sc, 0, 0, xine->config); |
468 | 571 | ||
469 | vo->sc.gui_pixel_aspect = 1.0; | 572 | vo->sc.gui_pixel_aspect = 1.0; |
470 | 573 | ||
471 | vo->m_show_video = 0; // false | 574 | vo->m_show_video = 0; // false |
472 | vo->m_video_fullscreen = 0; | 575 | vo->m_video_fullscreen = 0; |
473 | vo->m_is_scaling = 0; | 576 | vo->m_is_scaling = 0; |
474 | vo->display_ratio = 1.0; | 577 | vo->display_ratio = 1.0; |
475 | vo->gui_width = 16; | 578 | vo->gui_width = 16; |
@@ -486,39 +589,26 @@ xine_vo_driver_t* init_video_out_plugin( xine_t *xine, | |||
486 | vo->vo_driver.set_property = null_set_property; | 589 | vo->vo_driver.set_property = null_set_property; |
487 | vo->vo_driver.get_property_min_max = null_get_property_min_max; | 590 | vo->vo_driver.get_property_min_max = null_get_property_min_max; |
488 | vo->vo_driver.gui_data_exchange = null_gui_data_exchange; | 591 | vo->vo_driver.gui_data_exchange = null_gui_data_exchange; |
489 | vo->vo_driver.dispose = null_dispose; | 592 | vo->vo_driver.dispose = null_dispose; |
490 | vo->vo_driver.redraw_needed = null_redraw_needed; | 593 | vo->vo_driver.redraw_needed = null_redraw_needed; |
491 | 594 | ||
492 | 595 | ||
493 | /* capabilities */ | 596 | /* capabilities */ |
494 | vo->m_capabilities = /* VO_CAP_COPIES_IMAGE | */ VO_CAP_YUY2 | VO_CAP_YV12; | 597 | vo->m_capabilities = VO_CAP_YUY2 | VO_CAP_YV12; |
495 | vo->yuv2rgb_factory = yuv2rgb_factory_init (MODE_16_RGB, vo->yuv2rgb_swap, | 598 | vo->yuv2rgb_factory = yuv2rgb_factory_init (MODE_16_RGB, vo->yuv2rgb_swap, |
496 | vo->yuv2rgb_cmap); | 599 | vo->yuv2rgb_cmap); |
497 | 600 | ||
498 | vo->caller = userData; | 601 | vo->caller = userData; |
499 | vo->frameDis = frameDisplayFunc; | 602 | vo->frameDis = frameDisplayFunc; |
500 | 603 | ||
501 | /* return ( vo_driver_t*) vo; */ | 604 | return _x_vo_new_port(xine, &vo->vo_driver, 0); |
502 | return vo_new_port( xine, ( vo_driver_t* )vo, 0 ); | ||
503 | } | 605 | } |
504 | 606 | ||
505 | #if 0 | ||
506 | static vo_info_t vo_info_null = { | ||
507 | 5, | ||
508 | XINE_VISUAL_TYPE_FB | ||
509 | }; | ||
510 | |||
511 | vo_info_t *get_video_out_plugin_info(){ | ||
512 | vo_info_null.description = ("xine video output plugin using null device"); | ||
513 | return &vo_info_null; | ||
514 | } | ||
515 | |||
516 | #endif | ||
517 | 607 | ||
518 | /* this is special for this device */ | 608 | /* this is special for this device */ |
519 | /** | 609 | /** |
520 | * We know that we will be controled by the XINE LIB++ | 610 | * We know that we will be controled by the XINE LIB++ |
521 | */ | 611 | */ |
522 | 612 | ||
523 | /** | 613 | /** |
524 | * | 614 | * |
@@ -559,18 +649,17 @@ void null_set_gui_height( xine_vo_driver_t* self, int height ) { | |||
559 | 649 | ||
560 | 650 | ||
561 | void null_set_mode( xine_vo_driver_t* self, int depth, int rgb ) { | 651 | void null_set_mode( xine_vo_driver_t* self, int depth, int rgb ) { |
562 | null_driver_t* this = (null_driver_t*)self->driver; | 652 | null_driver_t* this = (null_driver_t*)self->driver; |
563 | 653 | ||
564 | this->bytes_per_pixel = (depth + 7 ) / 8; | 654 | this->bytes_per_pixel = (depth + 7 ) / 8; |
565 | this->bpp = this->bytes_per_pixel * 8; | 655 | this->bpp = this->bytes_per_pixel * 8; |
566 | this->depth = depth; | 656 | this->depth = depth; |
567 | printf("depth %d %d\n", depth, this->bpp); | 657 | |
568 | printf("pixeltype %d\n", rgb ); | ||
569 | switch ( this->depth ) { | 658 | switch ( this->depth ) { |
570 | case 32: | 659 | case 32: |
571 | if( rgb == 0 ) | 660 | if( rgb == 0 ) |
572 | this->yuv2rgb_mode = MODE_32_RGB; | 661 | this->yuv2rgb_mode = MODE_32_RGB; |
573 | else | 662 | else |
574 | this->yuv2rgb_mode = MODE_32_BGR; | 663 | this->yuv2rgb_mode = MODE_32_BGR; |
575 | case 24: | 664 | case 24: |
576 | if( this->bpp == 32 ) { | 665 | if( this->bpp == 32 ) { |
@@ -623,20 +712,24 @@ void null_display_handler( xine_vo_driver_t* self, display_xine_frame_t t, | |||
623 | void null_preload_decoders( xine_stream_t *stream ) | 712 | void null_preload_decoders( xine_stream_t *stream ) |
624 | { | 713 | { |
625 | static const uint32_t preloadedAudioDecoders[] = { BUF_AUDIO_MPEG, BUF_AUDIO_VORBIS }; | 714 | static const uint32_t preloadedAudioDecoders[] = { BUF_AUDIO_MPEG, BUF_AUDIO_VORBIS }; |
626 | static const uint8_t preloadedAudioDecoderCount = sizeof( preloadedAudioDecoders ) / sizeof( preloadedAudioDecoders[ 0 ] ); | 715 | static const uint8_t preloadedAudioDecoderCount = sizeof( preloadedAudioDecoders ) / sizeof( preloadedAudioDecoders[ 0 ] ); |
627 | static const uint32_t preloadedVideoDecoders[] = { BUF_VIDEO_MPEG, BUF_VIDEO_MPEG4, BUF_VIDEO_DIVX5 }; | 716 | static const uint32_t preloadedVideoDecoders[] = { BUF_VIDEO_MPEG, BUF_VIDEO_MPEG4, BUF_VIDEO_DIVX5 }; |
628 | static const uint8_t preloadedVideoDecoderCount = sizeof( preloadedVideoDecoders ) / sizeof( preloadedVideoDecoders[ 0 ] ); | 717 | static const uint8_t preloadedVideoDecoderCount = sizeof( preloadedVideoDecoders ) / sizeof( preloadedVideoDecoders[ 0 ] ); |
629 | 718 | ||
630 | uint8_t i; | 719 | uint8_t i; |
720 | #if 0 | ||
631 | 721 | ||
632 | for ( i = 0; i < preloadedAudioDecoderCount; ++i ) { | 722 | for ( i = 0; i < preloadedAudioDecoderCount; ++i ) { |
633 | audio_decoder_t *decoder = get_audio_decoder( stream, ( preloadedAudioDecoders[ i ] >> 16 ) & 0xff ); | 723 | audio_decoder_t *decoder = get_audio_decoder( stream, ( preloadedAudioDecoders[ i ] >> 16 ) & 0xff ); |
724 | decoder = decoder; | ||
634 | /* free_audio_decoder( stream, decoder ); */ | 725 | /* free_audio_decoder( stream, decoder ); */ |
635 | } | 726 | } |
636 | 727 | ||
637 | for ( i = 0; i < preloadedVideoDecoderCount; ++i ) { | 728 | for ( i = 0; i < preloadedVideoDecoderCount; ++i ) { |
638 | video_decoder_t *decoder = get_video_decoder( stream, ( preloadedVideoDecoders[ i ] >> 16 ) & 0xff ); | 729 | video_decoder_t *decoder = get_video_decoder( stream, ( preloadedVideoDecoders[ i ] >> 16 ) & 0xff ); |
730 | decoder = decoder; | ||
639 | /* free_video_decoder( stream, decoder ); */ | 731 | /* free_video_decoder( stream, decoder ); */ |
640 | } | 732 | } |
733 | #endif | ||
641 | } | 734 | } |
642 | 735 | ||
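
One of the video_out_fb.c changes merged above is structural: the old monolithic null_update_frame_format() is split into null_frame_compute_ideal_size(), null_frame_compute_rgb_size(), null_frame_reallocate() and null_setup_colorspace_converter(). The output-size clamping that null_frame_compute_rgb_size() applies after _x_vo_scale_compute_output_size() is easiest to read in isolation; the helper below is an illustration only (it does not exist in the commit and its name is made up), with the values that the real code reads from frame->sc passed in as parameters:

#include <stdint.h>

/* illustration of the clamping done in null_frame_compute_rgb_size() */
static void clamp_rgb_output_size( uint32_t delivered_height,
                                   uint32_t *output_width,
                                   uint32_t *output_height )
{
    /* avoid problems in yuv2rgb: keep at least one output line
       per 16-line slice of the delivered frame */
    if ( *output_height < (delivered_height + 15) >> 4 )
        *output_height = (delivered_height + 15) >> 4;

    /* never scale below 8 pixels of width */
    if ( *output_width < 8 )
        *output_width = 8;

    /* yuv2rgb_mlib needs an even width */
    if ( *output_width & 1 )
        (*output_width)++;
}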
diff --git a/noncore/multimedia/opieplayer2/opieplayer2.pro b/noncore/multimedia/opieplayer2/opieplayer2.pro index b9a8d6d..8166658 100644 --- a/noncore/multimedia/opieplayer2/opieplayer2.pro +++ b/noncore/multimedia/opieplayer2/opieplayer2.pro | |||
@@ -10,17 +10,17 @@ SOURCES = main.cpp \ | |||
10 | frame.cpp lib.cpp nullvideo.c xinevideowidget.cpp volumecontrol.cpp \ | 10 | frame.cpp lib.cpp nullvideo.c xinevideowidget.cpp volumecontrol.cpp \ |
11 | playlistwidgetgui.cpp\ | 11 | playlistwidgetgui.cpp\ |
12 | alphablend.c yuv2rgb.c yuv2rgb_arm2.c yuv2rgb_arm4l.S \ | 12 | alphablend.c yuv2rgb.c yuv2rgb_arm2.c yuv2rgb_arm4l.S \ |
13 | threadutil.cpp mediawidget.cpp playlistview.cpp playlistfileview.cpp \ | 13 | threadutil.cpp mediawidget.cpp playlistview.cpp playlistfileview.cpp \ |
14 | skin.cpp | 14 | skin.cpp |
15 | TARGET = opieplayer2 | 15 | TARGET = opieplayer2 |
16 | INCLUDEPATH += $(OPIEDIR)/include | 16 | INCLUDEPATH += $(OPIEDIR)/include |
17 | DEPENDPATH += $(OPIEDIR)/include | 17 | DEPENDPATH += $(OPIEDIR)/include |
18 | LIBS += -lqpe -lpthread -lopiecore2 -lopieui2 -lxine -lstdc++ | 18 | LIBS += -lqpe -lpthread -lopiecore2 -lopieui2 -lqtaux2 -lxine |
19 | MOC_DIR = qpeobj | 19 | MOC_DIR = qpeobj |
20 | OBJECTS_DIR = qpeobj | 20 | OBJECTS_DIR = qpeobj |
21 | 21 | ||
22 | include ( $(OPIEDIR)/include.pro ) | 22 | include ( $(OPIEDIR)/include.pro ) |
23 | 23 | ||
24 | !isEmpty( LIBXINE_INC_DIR ) { | 24 | !isEmpty( LIBXINE_INC_DIR ) { |
25 | INCLUDEPATH = $$LIBXINE_INC_DIR $$INCLUDEPATH | 25 | INCLUDEPATH = $$LIBXINE_INC_DIR $$INCLUDEPATH |
26 | } | 26 | } |
diff --git a/noncore/multimedia/opieplayer2/xinevideowidget.cpp b/noncore/multimedia/opieplayer2/xinevideowidget.cpp index 15c611f..1ac9277 100644 --- a/noncore/multimedia/opieplayer2/xinevideowidget.cpp +++ b/noncore/multimedia/opieplayer2/xinevideowidget.cpp | |||
@@ -46,16 +46,17 @@ | |||
46 | 46 | ||
47 | // 180 deg rot: copy a line from src to dst reversed | 47 | // 180 deg rot: copy a line from src to dst reversed |
48 | 48 | ||
49 | /* | 49 | /* |
50 | * This code relies the len be a multiply of 16bit | 50 | * This code relies the len be a multiply of 16bit |
51 | */ | 51 | */ |
52 | static inline void memcpy_rev ( void *_dst, void *_src, size_t len ) | 52 | static inline void memcpy_rev ( void *_dst, void *_src, size_t len ) |
53 | { | 53 | { |
54 | |||
54 | /* | 55 | /* |
55 | * move the source to the end | 56 | * move the source to the end |
56 | */ | 57 | */ |
57 | char *src_c = static_cast<char*>(_src) + len; | 58 | char *src_c = static_cast<char*>(_src) + len; |
58 | 59 | ||
59 | /* | 60 | /* |
60 | * as we copy by 16bit and not 8bit | 61 | * as we copy by 16bit and not 8bit |
61 | * devide the length by two | 62 | * devide the length by two |