author    | zecke <zecke> | 2004-09-23 19:02:47 (UTC)
committer | zecke <zecke> | 2004-09-23 19:02:47 (UTC)
commit    | 9a7e9427062e820f7b654e77e051213c3f53e134 (patch) (unidiff)
tree      | b37f5d9dd37a846551c44feca40c24c56ffc7a05
parent    | 69bf1d25b253167f3d2ef4b162c42aec4d8bbf7a (diff)
download  | opie-9a7e9427062e820f7b654e77e051213c3f53e134.zip
          | opie-9a7e9427062e820f7b654e77e051213c3f53e134.tar.gz
          | opie-9a7e9427062e820f7b654e77e051213c3f53e134.tar.bz2
-OTicker is in libqtaux, so we need to link against it to avoid weird crashes
-Some function names have changed in xine; update them to the new names
-Start to merge the video_out_fb.c changes into nullvideo and give credit
 to their source
-rw-r--r-- | noncore/multimedia/opieplayer2/lib.cpp             |  15
-rw-r--r-- | noncore/multimedia/opieplayer2/nullvideo.c         | 359
-rw-r--r-- | noncore/multimedia/opieplayer2/opieplayer2.pro     |   2
-rw-r--r-- | noncore/multimedia/opieplayer2/xinevideowidget.cpp |   1
4 files changed, 236 insertions, 141 deletions
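
For orientation before the diff: the xine update renames several video-out helpers that nullvideo.c relies on, and most of the nullvideo.c hunks below simply chase those renames (plus splitting null_update_frame_format() into smaller helpers merged from video_out_fb.c). The sketch below sums up the rename pattern. It is illustrative only, not part of the commit; the include paths are assumptions, but the underscore-prefixed calls and their arguments match what the new nullvideo.c code uses, and the old names in the comments are the ones the removed lines used.

    /* Illustrative sketch only (not from the commit).  Assumption: the
     * xine-lib development headers that declare these symbols are on the
     * include path; exact header names may differ per install. */
    #include <xine.h>
    #include <xine/xine_internal.h>   /* xine_t with its config member */
    #include <xine/video_out.h>       /* vo_driver_t, xine_video_port_t */
    #include <xine/vo_scale.h>        /* vo_scale_t, _x_vo_scale_* helpers */

    static xine_video_port_t *open_null_port( xine_t *xine, vo_driver_t *drv,
                                              vo_scale_t *sc )
    {
        /* Before this commit nullvideo.c used the un-prefixed names:
         *   vo_scale_init( sc, 0, 0, xine->config );
         *   vo_scale_compute_ideal_size( sc );
         *   vo_scale_compute_output_size( sc );
         *   return vo_new_port( xine, drv, 0 );
         * The updated xine exports the same helpers with an _x_ prefix: */
        _x_vo_scale_init( sc, 0, 0, xine->config );
        _x_vo_scale_compute_ideal_size( sc );
        _x_vo_scale_compute_output_size( sc );
        return _x_vo_new_port( xine, drv, 0 );
    }

The same pattern shows up in the local cleanups: null_frame_copy is registered as the frame's proc_slice handler under the clearer name null_frame_proc_slice, and init_video_out_plugin() now returns the port created by _x_vo_new_port() (a xine_video_port_t*, which lib.cpp still refers to by its older xine_vo_driver_t name).
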
diff --git a/noncore/multimedia/opieplayer2/lib.cpp b/noncore/multimedia/opieplayer2/lib.cpp
index 1ab5c96..248221b 100644
--- a/noncore/multimedia/opieplayer2/lib.cpp
+++ b/noncore/multimedia/opieplayer2/lib.cpp
@@ -33,32 +33,33 @@
33 | 33 | ||
34 | #include "xinevideowidget.h" | 34 | #include "xinevideowidget.h" |
35 | #include "frame.h" | 35 | #include "frame.h" |
36 | #include "lib.h" | 36 | #include "lib.h" |
37 | 37 | ||
38 | /* OPIE */ | 38 | /* OPIE */ |
39 | #include <opie2/odebug.h> | 39 | #include <opie2/odebug.h> |
40 | using namespace Opie::Core; | 40 | using namespace Opie::Core; |
41 | 41 | ||
42 | /* QT */ | 42 | /* QT */ |
43 | #include <qtextstream.h> | 43 | #include <qtextstream.h> |
44 | #include <qdir.h> | 44 | #include <qdir.h> |
45 | #include <qgfx_qws.h> | 45 | #include <qgfx_qws.h> |
46 | 46 | ||
47 | /* STD */ | 47 | /* STD */ |
48 | #include <assert.h> | 48 | #include <assert.h> |
49 | #include <unistd.h> | ||
49 | 50 | ||
50 | typedef void (*display_xine_frame_t) (void *user_data, uint8_t* frame, | 51 | typedef void (*display_xine_frame_t) (void *user_data, uint8_t* frame, |
51 | int width, int height,int bytes ); | 52 | int width, int height,int bytes ); |
52 | 53 | ||
53 | extern "C" { | 54 | extern "C" { |
54 | xine_vo_driver_t* init_video_out_plugin( xine_t *xine, void* video, display_xine_frame_t, void * ); | 55 | xine_vo_driver_t* init_video_out_plugin( xine_t *xine, void* video, display_xine_frame_t, void * ); |
55 | int null_is_showing_video( const xine_vo_driver_t* self ); | 56 | int null_is_showing_video( const xine_vo_driver_t* self ); |
56 | void null_set_show_video( const xine_vo_driver_t* self, int show ); | 57 | void null_set_show_video( const xine_vo_driver_t* self, int show ); |
57 | int null_is_fullscreen( const xine_vo_driver_t* self ); | 58 | int null_is_fullscreen( const xine_vo_driver_t* self ); |
58 | void null_set_fullscreen( const xine_vo_driver_t* self, int screen ); | 59 | void null_set_fullscreen( const xine_vo_driver_t* self, int screen ); |
59 | int null_is_scaling( const xine_vo_driver_t* self ); | 60 | int null_is_scaling( const xine_vo_driver_t* self ); |
60 | void null_set_scaling( const xine_vo_driver_t* self, int scale ); | 61 | void null_set_scaling( const xine_vo_driver_t* self, int scale ); |
61 | void null_set_gui_width( const xine_vo_driver_t* self, int width ); | 62 | void null_set_gui_width( const xine_vo_driver_t* self, int width ); |
62 | void null_set_gui_height( const xine_vo_driver_t* self, int height ); | 63 | void null_set_gui_height( const xine_vo_driver_t* self, int height ); |
63 | void null_set_mode( const xine_vo_driver_t* self, int depth, int rgb ); | 64 | void null_set_mode( const xine_vo_driver_t* self, int depth, int rgb ); |
64 | void null_set_videoGamma( const xine_vo_driver_t* self , int value ); | 65 | void null_set_videoGamma( const xine_vo_driver_t* self , int value ); |
@@ -95,36 +96,36 @@ Lib::Lib( InitializationMode initMode, XineVideoWidget* widget )
95 | ts << "audio.out_size_audio_buf:8096\n"; | 96 | ts << "audio.out_size_audio_buf:8096\n"; |
96 | ts << "audio.out_size_zero_buf:1024\n"; | 97 | ts << "audio.out_size_zero_buf:1024\n"; |
97 | ts << "audio.passthrough_offset:0\n"; | 98 | ts << "audio.passthrough_offset:0\n"; |
98 | f.close(); | 99 | f.close(); |
99 | } | 100 | } |
100 | 101 | ||
101 | if ( initMode == InitializeImmediately ) { | 102 | if ( initMode == InitializeImmediately ) { |
102 | initialize(); | 103 | initialize(); |
103 | m_initialized = true; | 104 | m_initialized = true; |
104 | } | 105 | } |
105 | else | 106 | else |
106 | start(); | 107 | start(); |
107 | } | 108 | } |
108 | 109 | ||
109 | void Lib::run() | 110 | void Lib::run() |
110 | { | 111 | { |
111 | odebug << "Lib::run() started" << oendl; | 112 | odebug << "Lib::run() started" << oendl; |
112 | initialize(); | 113 | initialize(); |
113 | m_initialized = true; | 114 | m_initialized = true; |
114 | odebug << "Lib::run() finished" << oendl; | 115 | odebug << "Lib::run() finished" << oendl; |
115 | } | 116 | } |
116 | 117 | ||
117 | void Lib::initialize() | 118 | void Lib::initialize() |
118 | { | 119 | { |
119 | m_duringInitialization = true; | 120 | m_duringInitialization = true; |
120 | m_xine = xine_new( ); | 121 | m_xine = xine_new( ); |
121 | 122 | ||
122 | QString configPath = QDir::homeDirPath() + "/Settings/opiexine.cf"; | 123 | QString configPath = QDir::homeDirPath() + "/Settings/opiexine.cf"; |
123 | xine_config_load( m_xine, QFile::encodeName( configPath ) ); | 124 | xine_config_load( m_xine, QFile::encodeName( configPath ) ); |
124 | 125 | ||
125 | xine_init( m_xine ); | 126 | xine_init( m_xine ); |
126 | 127 | ||
127 | // allocate oss for sound | 128 | // allocate oss for sound |
128 | // and fb for framebuffer | 129 | // and fb for framebuffer |
129 | m_audioOutput = xine_open_audio_driver( m_xine, "oss", NULL ); | 130 | m_audioOutput = xine_open_audio_driver( m_xine, "oss", NULL ); |
130 | m_videoOutput = ::init_video_out_plugin( m_xine, NULL, xine_display_frame, this ); | 131 | m_videoOutput = ::init_video_out_plugin( m_xine, NULL, xine_display_frame, this ); |
@@ -133,33 +134,33 @@ void Lib::initialize()
133 | //xine_open_video_driver( m_xine, NULL, XINE_VISUAL_TYPE_FB, NULL); | 134 | //xine_open_video_driver( m_xine, NULL, XINE_VISUAL_TYPE_FB, NULL); |
134 | 135 | ||
135 | 136 | ||
136 | // null_display_handler( m_videoOutput, xine_display_frame, this ); | 137 | // null_display_handler( m_videoOutput, xine_display_frame, this ); |
137 | 138 | ||
138 | m_stream = xine_stream_new (m_xine, m_audioOutput, m_videoOutput ); | 139 | m_stream = xine_stream_new (m_xine, m_audioOutput, m_videoOutput ); |
139 | 140 | ||
140 | if (m_wid != 0 ) { | 141 | if (m_wid != 0 ) { |
141 | printf( "!0\n" ); | 142 | printf( "!0\n" ); |
142 | setWidget( m_wid ); | 143 | setWidget( m_wid ); |
143 | } | 144 | } |
144 | 145 | ||
145 | m_queue = xine_event_new_queue (m_stream); | 146 | m_queue = xine_event_new_queue (m_stream); |
146 | 147 | ||
147 | xine_event_create_listener_thread (m_queue, xine_event_handler, this); | 148 | xine_event_create_listener_thread (m_queue, xine_event_handler, this); |
148 | 149 | ||
149 | ::null_preload_decoders( m_stream ); | 150 | ::null_preload_decoders( m_stream ); |
150 | 151 | ||
151 | m_duringInitialization = false; | 152 | m_duringInitialization = false; |
152 | } | 153 | } |
153 | 154 | ||
154 | Lib::~Lib() { | 155 | Lib::~Lib() { |
155 | assert( isRunning() == false ); | 156 | assert( isRunning() == false ); |
156 | assert( m_initialized ); | 157 | assert( m_initialized ); |
157 | 158 | ||
158 | // free( m_config ); | 159 | // free( m_config ); |
159 | 160 | ||
160 | xine_close( m_stream ); | 161 | xine_close( m_stream ); |
161 | 162 | ||
162 | xine_event_dispose_queue( m_queue ); | 163 | xine_event_dispose_queue( m_queue ); |
163 | 164 | ||
164 | xine_dispose( m_stream ); | 165 | xine_dispose( m_stream ); |
165 | 166 | ||
@@ -204,33 +205,33 @@ int Lib::play( const QString& fileName, int startPos, int start_time ) {
204 | 205 | ||
205 | QString str = fileName.stripWhiteSpace(); | 206 | QString str = fileName.stripWhiteSpace(); |
206 | 207 | ||
207 | //m_stream = xine_stream_new (m_xine, m_audioOutput, m_videoOutput ); | 208 | //m_stream = xine_stream_new (m_xine, m_audioOutput, m_videoOutput ); |
208 | //m_queue = xine_event_new_queue (m_stream); | 209 | //m_queue = xine_event_new_queue (m_stream); |
209 | //xine_event_create_listener_thread (m_queue, xine_event_handler, this); | 210 | //xine_event_create_listener_thread (m_queue, xine_event_handler, this); |
210 | 211 | ||
211 | if ( !xine_open( m_stream, str.utf8().data() ) ) { | 212 | if ( !xine_open( m_stream, str.utf8().data() ) ) { |
212 | return 0; | 213 | return 0; |
213 | } | 214 | } |
214 | return xine_play( m_stream, startPos, start_time); | 215 | return xine_play( m_stream, startPos, start_time); |
215 | } | 216 | } |
216 | 217 | ||
217 | void Lib::stop() { | 218 | void Lib::stop() { |
218 | assert( m_initialized ); | 219 | assert( m_initialized ); |
219 | 220 | ||
220 | odebug << "<<<<<<<< STOP IN LIB TRIGGERED >>>>>>>" << oendl; | 221 | odebug << "<<<<<<<< STOP IN LIB TRIGGERED >>>>>>>" << oendl; |
221 | xine_stop( m_stream ); | 222 | xine_stop( m_stream ); |
222 | } | 223 | } |
223 | 224 | ||
224 | void Lib::pause( bool toggle ) { | 225 | void Lib::pause( bool toggle ) { |
225 | assert( m_initialized ); | 226 | assert( m_initialized ); |
226 | 227 | ||
227 | xine_set_param( m_stream, XINE_PARAM_SPEED, toggle ? XINE_SPEED_PAUSE : XINE_SPEED_NORMAL ); | 228 | xine_set_param( m_stream, XINE_PARAM_SPEED, toggle ? XINE_SPEED_PAUSE : XINE_SPEED_NORMAL ); |
228 | } | 229 | } |
229 | 230 | ||
230 | int Lib::speed() const { | 231 | int Lib::speed() const { |
231 | assert( m_initialized ); | 232 | assert( m_initialized ); |
232 | 233 | ||
233 | return xine_get_param ( m_stream, XINE_PARAM_SPEED ); | 234 | return xine_get_param ( m_stream, XINE_PARAM_SPEED ); |
234 | } | 235 | } |
235 | 236 | ||
236 | void Lib::setSpeed( int speed ) { | 237 | void Lib::setSpeed( int speed ) { |
@@ -316,35 +317,35 @@ QString Lib::metaInfo( int number) const {
316 | assert( m_initialized ); | 317 | assert( m_initialized ); |
317 | 318 | ||
318 | return xine_get_meta_info( m_stream, number ); | 319 | return xine_get_meta_info( m_stream, number ); |
319 | } | 320 | } |
320 | 321 | ||
321 | int Lib::error() const { | 322 | int Lib::error() const { |
322 | assert( m_initialized ); | 323 | assert( m_initialized ); |
323 | 324 | ||
324 | return xine_get_error( m_stream ); | 325 | return xine_get_error( m_stream ); |
325 | }; | 326 | }; |
326 | 327 | ||
327 | void Lib::ensureInitialized() | 328 | void Lib::ensureInitialized() |
328 | { | 329 | { |
329 | if ( m_initialized ) | 330 | if ( m_initialized ) |
330 | return; | 331 | return; |
331 | 332 | ||
332 | odebug << "waiting for initialization thread to finish" << oendl; | 333 | odebug << "waiting for initialization thread to finish" << oendl; |
333 | wait(); | 334 | wait(); |
334 | odebug << "initialization thread finished!" << oendl; | 335 | odebug << "initialization thread finished!" << oendl; |
335 | } | 336 | } |
336 | 337 | ||
337 | void Lib::setWidget( XineVideoWidget *widget ) | 338 | void Lib::setWidget( XineVideoWidget *widget ) |
338 | { | 339 | { |
339 | m_wid = widget; | 340 | m_wid = widget; |
340 | resize ( m_wid-> size ( ) ); | 341 | resize ( m_wid-> size ( ) ); |
341 | ::null_set_mode( m_videoOutput, qt_screen->depth(), qt_screen->pixelType() ); | 342 | ::null_set_mode( m_videoOutput, qt_screen->depth(), qt_screen->pixelType() ); |
342 | m_wid->repaint(); | 343 | m_wid->repaint(); |
343 | } | 344 | } |
344 | 345 | ||
345 | void Lib::receiveMessage( ThreadUtil::ChannelMessage *msg, SendType sendType ) | 346 | void Lib::receiveMessage( ThreadUtil::ChannelMessage *msg, SendType sendType ) |
346 | { | 347 | { |
347 | assert( sendType == ThreadUtil::Channel::OneWay ); | 348 | assert( sendType == ThreadUtil::Channel::OneWay ); |
348 | handleXineEvent( msg->type() ); | 349 | handleXineEvent( msg->type() ); |
349 | delete msg; | 350 | delete msg; |
350 | } | 351 | } |
@@ -411,24 +412,24 @@ bool Lib::isScaling() const {
411 | return ::null_is_scaling( m_videoOutput ); | 412 | return ::null_is_scaling( m_videoOutput ); |
412 | } | 413 | } |
413 | 414 | ||
414 | void Lib::xine_event_handler( void* user_data, const xine_event_t* t ) { | 415 | void Lib::xine_event_handler( void* user_data, const xine_event_t* t ) { |
415 | ( (Lib*)user_data)->handleXineEvent( t ); | 416 | ( (Lib*)user_data)->handleXineEvent( t ); |
416 | } | 417 | } |
417 | 418 | ||
418 | void Lib::xine_display_frame( void* user_data, uint8_t *frame, | 419 | void Lib::xine_display_frame( void* user_data, uint8_t *frame, |
419 | int width, int height, int bytes ) { | 420 | int width, int height, int bytes ) { |
420 | ( (Lib*)user_data)->drawFrame( frame, width, height, bytes ); | 421 | ( (Lib*)user_data)->drawFrame( frame, width, height, bytes ); |
421 | } | 422 | } |
422 | 423 | ||
423 | void Lib::drawFrame( uint8_t* frame, int width, int height, int bytes ) { | 424 | void Lib::drawFrame( uint8_t* frame, int width, int height, int bytes ) { |
424 | assert( m_initialized ); | 425 | assert( m_initialized ); |
425 | 426 | ||
426 | if ( !m_video ) { | 427 | if ( !m_video ) { |
427 | owarn << "not showing video now" << oendl; | 428 | owarn << "not showing video now" << oendl; |
428 | return; | 429 | return; |
429 | } | 430 | } |
430 | 431 | ||
431 | assert( m_wid ); | 432 | assert( m_wid ); |
432 | 433 | ||
433 | m_wid-> setVideoFrame ( frame, width, height, bytes ); | 434 | m_wid-> setVideoFrame ( frame, width, height, bytes ); |
434 | } | 435 | } |
diff --git a/noncore/multimedia/opieplayer2/nullvideo.c b/noncore/multimedia/opieplayer2/nullvideo.c
index 378bbd4..6769a37 100644
--- a/noncore/multimedia/opieplayer2/nullvideo.c
+++ b/noncore/multimedia/opieplayer2/nullvideo.c
@@ -1,22 +1,23 @@
1 | /* | 1 | /* |
2 | This file is part of the Opie Project | 2 | This file is part of the Opie Project |
3 | 3 | ||
4 | Copyright (c) 2002 Max Reiss <harlekin@handhelds.org> | 4 | Copyright (c) 2002 Max Reiss <harlekin@handhelds.org> |
5 | Copyright (c) 2002 LJP <> | 5 | Copyright (c) 2002 LJP <> |
6 | Copyright (c) 2002 Holger Freyther <zecke@handhelds.org> | 6 | Copyright (c) 2002 Holger Freyther <zecke@handhelds.org> |
7 | Copyright (c) 2002-2003 Miguel Freitas of xine | ||
7 | =. | 8 | =. |
8 | .=l. | 9 | .=l. |
9 | .>+-= | 10 | .>+-= |
10 | _;:, .> :=|. This program is free software; you can | 11 | _;:, .> :=|. This program is free software; you can |
11 | .> <`_, > . <= redistribute it and/or modify it under | 12 | .> <`_, > . <= redistribute it and/or modify it under |
12 | :`=1 )Y*s>-.-- : the terms of the GNU General Public | 13 | :`=1 )Y*s>-.-- : the terms of the GNU General Public |
13 | .="- .-=="i, .._ License as published by the Free Software | 14 | .="- .-=="i, .._ License as published by the Free Software |
14 | - . .-<_> .<> Foundation; either version 2 of the License, | 15 | - . .-<_> .<> Foundation; either version 2 of the License, |
15 | ._= =} : or (at your option) any later version. | 16 | ._= =} : or (at your option) any later version. |
16 | .%`+i> _;_. | 17 | .%`+i> _;_. |
17 | .i_,=:_. -<s. This program is distributed in the hope that | 18 | .i_,=:_. -<s. This program is distributed in the hope that |
18 | + . -:. = it will be useful, but WITHOUT ANY WARRANTY; | 19 | + . -:. = it will be useful, but WITHOUT ANY WARRANTY; |
19 | : .. .:, . . . without even the implied warranty of | 20 | : .. .:, . . . without even the implied warranty of |
20 | =_ + =;=|` MERCHANTABILITY or FITNESS FOR A | 21 | =_ + =;=|` MERCHANTABILITY or FITNESS FOR A |
21 | _.=:. : :=>`: PARTICULAR PURPOSE. See the GNU | 22 | _.=:. : :=>`: PARTICULAR PURPOSE. See the GNU |
22 | ..}^=.= = ; Library General Public License for more | 23 | ..}^=.= = ; Library General Public License for more |
@@ -100,33 +101,33 @@ struct opie_frame_s {
100 | uint8_t *data; /* rgb */ | 101 | uint8_t *data; /* rgb */ |
101 | int bytes_per_line; | 102 | int bytes_per_line; |
102 | 103 | ||
103 | yuv2rgb_t *yuv2rgb; | 104 | yuv2rgb_t *yuv2rgb; |
104 | uint8_t *rgb_dst; | 105 | uint8_t *rgb_dst; |
105 | int yuv_stride; | 106 | int yuv_stride; |
106 | int stripe_height, stripe_inc; | 107 | int stripe_height, stripe_inc; |
107 | 108 | ||
108 | null_driver_t *output; | 109 | null_driver_t *output; |
109 | }; | 110 | }; |
110 | 111 | ||
111 | static uint32_t null_get_capabilities( vo_driver_t *self ){ | 112 | static uint32_t null_get_capabilities( vo_driver_t *self ){ |
112 | null_driver_t* this = (null_driver_t*)self; | 113 | null_driver_t* this = (null_driver_t*)self; |
113 | return this->m_capabilities; | 114 | return this->m_capabilities; |
114 | } | 115 | } |
115 | 116 | ||
116 | static void null_frame_copy (vo_frame_t *vo_img, uint8_t **src) { | 117 | static void null_frame_proc_slice (vo_frame_t *vo_img, uint8_t **src) { |
117 | opie_frame_t *frame = (opie_frame_t *) vo_img ; | 118 | opie_frame_t *frame = (opie_frame_t *) vo_img ; |
118 | 119 | ||
119 | vo_img->proc_called = 1; | 120 | vo_img->proc_called = 1; |
120 | 121 | ||
121 | if (!frame->output->m_show_video) { | 122 | if (!frame->output->m_show_video) { |
122 | /* printf("nullvideo: no video\n"); */ | 123 | /* printf("nullvideo: no video\n"); */ |
123 | return; | 124 | return; |
124 | } | 125 | } |
125 | 126 | ||
126 | if (frame->format == XINE_IMGFMT_YV12) { | 127 | if (frame->format == XINE_IMGFMT_YV12) { |
127 | frame->yuv2rgb->yuv2rgb_fun (frame->yuv2rgb, frame->rgb_dst, | 128 | frame->yuv2rgb->yuv2rgb_fun (frame->yuv2rgb, frame->rgb_dst, |
128 | src[0], src[1], src[2]); | 129 | src[0], src[1], src[2]); |
129 | } else { | 130 | } else { |
130 | 131 | ||
131 | frame->yuv2rgb->yuy22rgb_fun (frame->yuv2rgb, frame->rgb_dst, | 132 | frame->yuv2rgb->yuy22rgb_fun (frame->yuv2rgb, frame->rgb_dst, |
132 | src[0]); | 133 | src[0]); |
@@ -162,371 +163,460 @@ static void null_frame_dispose( vo_frame_t* vo_img){
162 | if (frame->data) | 163 | if (frame->data) |
163 | free( frame->data ); | 164 | free( frame->data ); |
164 | free (frame); | 165 | free (frame); |
165 | } | 166 | } |
166 | 167 | ||
167 | /* end take care of frames*/ | 168 | /* end take care of frames*/ |
168 | 169 | ||
169 | static vo_frame_t* null_alloc_frame( vo_driver_t* self ){ | 170 | static vo_frame_t* null_alloc_frame( vo_driver_t* self ){ |
170 | 171 | ||
171 | null_driver_t* this = (null_driver_t*)self; | 172 | null_driver_t* this = (null_driver_t*)self; |
172 | opie_frame_t* frame; | 173 | opie_frame_t* frame; |
173 | 174 | ||
174 | #ifdef LOG | 175 | #ifdef LOG |
175 | fprintf (stderr, "nullvideo: alloc_frame\n"); | 176 | fprintf (stderr, "nullvideo: alloc_frame\n"); |
176 | #endif | 177 | #endif |
177 | 178 | ||
178 | frame = (opie_frame_t*)malloc ( sizeof(opie_frame_t) ); | 179 | frame = (opie_frame_t*)xine_xmalloc ( sizeof(opie_frame_t) ); |
179 | 180 | ||
180 | memset( frame, 0, sizeof( opie_frame_t) ); | ||
181 | memcpy (&frame->sc, &this->sc, sizeof(vo_scale_t)); | 181 | memcpy (&frame->sc, &this->sc, sizeof(vo_scale_t)); |
182 | 182 | ||
183 | pthread_mutex_init (&frame->frame.mutex, NULL); | 183 | pthread_mutex_init (&frame->frame.mutex, NULL); |
184 | 184 | ||
185 | frame->output = this; | 185 | frame->output = this; |
186 | 186 | ||
187 | /* initialize the frame*/ | 187 | /* initialize the frame*/ |
188 | frame->frame.driver = self; | 188 | frame->frame.driver = self; |
189 | frame->frame.proc_slice = null_frame_copy; | 189 | frame->frame.proc_slice = null_frame_proc_slice; |
190 | frame->frame.field = null_frame_field; | 190 | frame->frame.field = null_frame_field; |
191 | frame->frame.dispose = null_frame_dispose; | 191 | frame->frame.dispose = null_frame_dispose; |
192 | 192 | ||
193 | /* | 193 | /* |
194 | * colorspace converter for this frame | 194 | * colorspace converter for this frame |
195 | */ | 195 | */ |
196 | frame->yuv2rgb = this->yuv2rgb_factory->create_converter (this->yuv2rgb_factory); | 196 | frame->yuv2rgb = this->yuv2rgb_factory->create_converter (this->yuv2rgb_factory); |
197 | 197 | ||
198 | return (vo_frame_t*) frame; | 198 | return (vo_frame_t*) frame; |
199 | } | 199 | } |
200 | 200 | ||
201 | static void null_update_frame_format( vo_driver_t* self, vo_frame_t* img, | ||
202 | uint32_t width, uint32_t height, | ||
203 | double ratio_code, int format, | ||
204 | int flags ){ | ||
205 | null_driver_t* this = (null_driver_t*) self; | ||
206 | opie_frame_t* frame = (opie_frame_t*)img; | ||
207 | /* not needed now */ | ||
208 | 201 | ||
209 | #ifdef LOG | 202 | static void null_frame_compute_ideal_size( null_driver_t *this, |
210 | fprintf (stderr, "nullvideo: update_frame_format\n"); | 203 | opie_frame_t *frame ) { |
211 | #endif | 204 | this = this; |
212 | 205 | ||
213 | flags &= VO_BOTH_FIELDS; | 206 | _x_vo_scale_compute_ideal_size(&frame->sc); |
207 | } | ||
214 | 208 | ||
215 | /* find out if we need to adapt this frame */ | 209 | static void null_frame_compute_rgb_size( null_driver_t *this, |
210 | opie_frame_t *frame ){ | ||
211 | this = this; | ||
216 | 212 | ||
217 | if ((width != frame->sc.delivered_width) | 213 | _x_vo_scale_compute_output_size(&frame->sc); |
218 | || (height != frame->sc.delivered_height) | ||
219 | || (ratio_code != frame->sc.delivered_ratio) | ||
220 | || (flags != frame->flags) | ||
221 | || (format != frame->format) | ||
222 | || (this->sc.user_ratio != frame->sc.user_ratio) | ||
223 | || (this->gui_width != frame->sc.gui_width) | ||
224 | || (this->gui_height != frame->sc.gui_height)) { | ||
225 | 214 | ||
226 | frame->sc.delivered_width = width; | 215 | /* avoid problems in yuv2rgb */ |
227 | frame->sc.delivered_height = height; | 216 | if(frame->sc.output_height < (frame->sc.delivered_height+15) >> 4) |
228 | frame->sc.delivered_ratio = ratio_code; | 217 | frame->sc.output_height = (frame->sc.delivered_height+15) >> 4; |
229 | frame->flags = flags; | ||
230 | frame->format = format; | ||
231 | frame->sc.user_ratio = this->sc.user_ratio; | ||
232 | frame->sc.gui_width = this->gui_width; | ||
233 | frame->sc.gui_height = this->gui_height; | ||
234 | frame->sc.gui_pixel_aspect = 1.0; | ||
235 | 218 | ||
236 | vo_scale_compute_ideal_size ( &frame->sc ); | 219 | if (frame->sc.output_width < 8) |
237 | vo_scale_compute_output_size( &frame->sc ); | 220 | frame->sc.output_width = 8; |
238 | 221 | ||
239 | #ifdef LOG | 222 | /* yuv2rgb_mlib needs an even YUV2 width */ |
240 | fprintf (stderr, "nullvideo: gui %dx%d delivered %dx%d output %dx%d\n", | 223 | if (frame->sc.output_width & 1) |
241 | frame->sc.gui_width, frame->sc.gui_height, | 224 | frame->sc.output_width++; |
242 | frame->sc.delivered_width, frame->sc.delivered_height, | 225 | } |
243 | frame->sc.output_width, frame->sc.output_height); | ||
244 | #endif | ||
245 | 226 | ||
227 | static void null_frame_reallocate( null_driver_t *this, opie_frame_t *frame, | ||
228 | uint32_t width, uint32_t height, int format){ | ||
246 | /* | 229 | /* |
247 | * (re-) allocate | 230 | * (re-) allocate |
248 | */ | 231 | */ |
249 | if( frame->data ) { | 232 | if( frame->data ) { |
250 | if( frame->chunk[0] ){ | 233 | if( frame->chunk[0] ){ |
251 | free( frame->chunk[0] ); | 234 | free( frame->chunk[0] ); |
252 | frame->chunk[0] = NULL; | 235 | frame->chunk[0] = NULL; |
253 | } | 236 | } |
254 | if( frame->chunk[1] ){ | 237 | if( frame->chunk[1] ){ |
255 | free ( frame->chunk[1] ); | 238 | free ( frame->chunk[1] ); |
256 | frame->chunk[1] = NULL; | 239 | frame->chunk[1] = NULL; |
257 | } | 240 | } |
258 | if( frame->chunk[2] ){ | 241 | if( frame->chunk[2] ){ |
259 | free ( frame->chunk[2] ); | 242 | free ( frame->chunk[2] ); |
260 | frame->chunk[2] = NULL; | 243 | frame->chunk[2] = NULL; |
261 | } | 244 | } |
262 | free ( frame->data ); | 245 | free ( frame->data ); |
263 | } | 246 | } |
264 | 247 | ||
265 | frame->data = xine_xmalloc (frame->sc.output_width | 248 | frame->data = xine_xmalloc (frame->sc.output_width |
266 | * frame->sc.output_height | 249 | * frame->sc.output_height |
267 | * this->bytes_per_pixel ); | 250 | * this->bytes_per_pixel ); |
268 | 251 | ||
269 | if( format == XINE_IMGFMT_YV12 ) { | 252 | if( format == XINE_IMGFMT_YV12 ) { |
270 | frame->frame.pitches[0] = 8*((width + 7) / 8); | 253 | frame->frame.pitches[0] = 8*((width + 7) / 8); |
271 | frame->frame.pitches[1] = 8*((width + 15) / 16); | 254 | frame->frame.pitches[1] = 8*((width + 15) / 16); |
272 | frame->frame.pitches[2] = 8*((width + 15) / 16); | 255 | frame->frame.pitches[2] = 8*((width + 15) / 16); |
273 | frame->frame.base[0] = xine_xmalloc_aligned (16, frame->frame.pitches[0] * height,(void **)&frame->chunk[0]); | 256 | frame->frame.base[0] = xine_xmalloc_aligned (16, frame->frame.pitches[0] * height,(void **)&frame->chunk[0]); |
274 | frame->frame.base[1] = xine_xmalloc_aligned (16, frame->frame.pitches[1] * ((height+ 1)/2), (void **)&frame->chunk[1]); | 257 | frame->frame.base[1] = xine_xmalloc_aligned (16, frame->frame.pitches[1] * ((height+ 1)/2), (void **)&frame->chunk[1]); |
275 | frame->frame.base[2] = xine_xmalloc_aligned (16, frame->frame.pitches[2] * ((height+ 1)/2), (void **)&frame->chunk[2]); | 258 | frame->frame.base[2] = xine_xmalloc_aligned (16, frame->frame.pitches[2] * ((height+ 1)/2), (void **)&frame->chunk[2]); |
276 | 259 | ||
277 | }else{ | 260 | }else{ |
278 | frame->frame.pitches[0] = 8*((width + 3) / 4); | 261 | frame->frame.pitches[0] = 8*((width + 3) / 4); |
279 | 262 | ||
280 | frame->frame.base[0] = xine_xmalloc_aligned (16, frame->frame.pitches[0] * height, | 263 | frame->frame.base[0] = xine_xmalloc_aligned (16, frame->frame.pitches[0] * height, |
281 | (void **)&frame->chunk[0]); | 264 | (void **)&frame->chunk[0]); |
282 | frame->chunk[1] = NULL; | 265 | frame->chunk[1] = NULL; |
283 | frame->chunk[2] = NULL; | 266 | frame->chunk[2] = NULL; |
284 | } | 267 | } |
285 | 268 | ||
269 | } | ||
270 | |||
271 | static void null_setup_colorspace_converter(opie_frame_t *frame, int flags ) { | ||
272 | switch (flags) { | ||
273 | case VO_TOP_FIELD: | ||
274 | case VO_BOTTOM_FIELD: | ||
275 | frame->yuv2rgb->configure (frame->yuv2rgb, | ||
276 | frame->sc.delivered_width, | ||
277 | 16, | ||
278 | 2*frame->frame.pitches[0], | ||
279 | 2*frame->frame.pitches[1], | ||
280 | frame->sc.output_width, | ||
281 | frame->stripe_height, | ||
282 | frame->bytes_per_line*2); | ||
283 | frame->yuv_stride = frame->bytes_per_line*2; | ||
284 | break; | ||
285 | case VO_BOTH_FIELDS: | ||
286 | frame->yuv2rgb->configure (frame->yuv2rgb, | ||
287 | frame->sc.delivered_width, | ||
288 | 16, | ||
289 | frame->frame.pitches[0], | ||
290 | frame->frame.pitches[1], | ||
291 | frame->sc.output_width, | ||
292 | frame->stripe_height, | ||
293 | frame->bytes_per_line); | ||
294 | frame->yuv_stride = frame->bytes_per_line; | ||
295 | break; | ||
296 | } | ||
297 | #ifdef LOG | ||
298 | fprintf (stderr, "nullvideo: colorspace converter configured.\n"); | ||
299 | #endif | ||
300 | } | ||
301 | |||
302 | static void null_update_frame_format( vo_driver_t* self, vo_frame_t* img, | ||
303 | uint32_t width, uint32_t height, | ||
304 | double ratio_code, int format, | ||
305 | int flags ){ | ||
306 | null_driver_t* this = (null_driver_t*) self; | ||
307 | opie_frame_t* frame = (opie_frame_t*)img; | ||
308 | |||
309 | #ifdef LOG | ||
310 | fprintf (stderr, "nullvideo: update_frame_format\n"); | ||
311 | #endif | ||
312 | |||
313 | flags &= VO_BOTH_FIELDS; | ||
314 | |||
315 | /* find out if we need to adapt this frame */ | ||
316 | |||
317 | if ((width != frame->sc.delivered_width) | ||
318 | || (height != frame->sc.delivered_height) | ||
319 | || (ratio_code != frame->sc.delivered_ratio) | ||
320 | || (flags != frame->flags) | ||
321 | || (format != frame->format) | ||
322 | || (this->sc.user_ratio != frame->sc.user_ratio) | ||
323 | || (this->gui_width != frame->sc.gui_width) | ||
324 | || (this->gui_height != frame->sc.gui_height)) { | ||
325 | |||
326 | frame->sc.delivered_width = width; | ||
327 | frame->sc.delivered_height = height; | ||
328 | frame->sc.delivered_ratio = ratio_code; | ||
329 | frame->flags = flags; | ||
330 | frame->format = format; | ||
331 | frame->sc.user_ratio = this->sc.user_ratio; | ||
332 | frame->sc.gui_width = this->gui_width; | ||
333 | frame->sc.gui_height = this->gui_height; | ||
334 | frame->sc.gui_pixel_aspect = 1.0; | ||
335 | |||
336 | |||
337 | null_frame_compute_ideal_size(this, frame); | ||
338 | null_frame_compute_rgb_size(this, frame); | ||
339 | null_frame_reallocate(this, frame, width, height, format); | ||
340 | |||
341 | #ifdef LOG | ||
342 | fprintf (stderr, "nullvideo: gui %dx%d delivered %dx%d output %dx%d\n", | ||
343 | frame->sc.gui_width, frame->sc.gui_height, | ||
344 | frame->sc.delivered_width, frame->sc.delivered_height, | ||
345 | frame->sc.output_width, frame->sc.output_height); | ||
346 | #endif | ||
347 | |||
348 | |||
349 | |||
286 | frame->stripe_height = 16 * frame->sc.output_height / frame->sc.delivered_height; | 350 | frame->stripe_height = 16 * frame->sc.output_height / frame->sc.delivered_height; |
287 | frame->bytes_per_line = frame->sc.output_width * this->bytes_per_pixel; | 351 | frame->bytes_per_line = frame->sc.output_width * this->bytes_per_pixel; |
288 | 352 | ||
289 | /* | 353 | /* |
290 | * set up colorspace converter | 354 | * set up colorspace converter |
291 | */ | 355 | */ |
356 | null_setup_colorspace_converter(frame, flags); | ||
292 | 357 | ||
293 | switch (flags) { | ||
294 | case VO_TOP_FIELD: | ||
295 | case VO_BOTTOM_FIELD: | ||
296 | frame->yuv2rgb->configure (frame->yuv2rgb, | ||
297 | frame->sc.delivered_width, | ||
298 | 16, | ||
299 | 2*frame->frame.pitches[0], | ||
300 | 2*frame->frame.pitches[1], | ||
301 | frame->sc.output_width, | ||
302 | frame->stripe_height, | ||
303 | frame->bytes_per_line*2); | ||
304 | frame->yuv_stride = frame->bytes_per_line*2; | ||
305 | break; | ||
306 | case VO_BOTH_FIELDS: | ||
307 | frame->yuv2rgb->configure (frame->yuv2rgb, | ||
308 | frame->sc.delivered_width, | ||
309 | 16, | ||
310 | frame->frame.pitches[0], | ||
311 | frame->frame.pitches[1], | ||
312 | frame->sc.output_width, | ||
313 | frame->stripe_height, | ||
314 | frame->bytes_per_line); | ||
315 | frame->yuv_stride = frame->bytes_per_line; | ||
316 | break; | ||
317 | } | ||
318 | #ifdef LOG | ||
319 | fprintf (stderr, "nullvideo: colorspace converter configured.\n"); | ||
320 | #endif | ||
321 | } | 358 | } |
322 | |||
323 | /* | 359 | /* |
324 | * reset dest pointers | 360 | * reset dest pointers |
325 | */ | 361 | */ |
326 | 362 | ||
327 | if (frame->data) { | 363 | if (frame->data) { |
328 | switch (flags) { | 364 | switch (flags) { |
329 | case VO_TOP_FIELD: | 365 | case VO_TOP_FIELD: |
330 | frame->rgb_dst = (uint8_t *)frame->data; | 366 | frame->rgb_dst = (uint8_t *)frame->data; |
331 | frame->stripe_inc = 2 * frame->stripe_height * frame->bytes_per_line; | 367 | frame->stripe_inc = 2 * frame->stripe_height * frame->bytes_per_line; |
332 | break; | 368 | break; |
333 | case VO_BOTTOM_FIELD: | 369 | case VO_BOTTOM_FIELD: |
334 | frame->rgb_dst = (uint8_t *)frame->data + frame->bytes_per_line ; | 370 | frame->rgb_dst = (uint8_t *)frame->data + frame->bytes_per_line ; |
335 | frame->stripe_inc = 2 * frame->stripe_height * frame->bytes_per_line; | 371 | frame->stripe_inc = 2 * frame->stripe_height * frame->bytes_per_line; |
336 | break; | 372 | break; |
337 | case VO_BOTH_FIELDS: | 373 | case VO_BOTH_FIELDS: |
338 | frame->rgb_dst = (uint8_t *)frame->data; | 374 | frame->rgb_dst = (uint8_t *)frame->data; |
339 | frame->stripe_inc = frame->stripe_height * frame->bytes_per_line; | 375 | frame->stripe_inc = frame->stripe_height * frame->bytes_per_line; |
340 | break; | 376 | break; |
377 | } | ||
341 | } | 378 | } |
342 | } | ||
343 | } | 379 | } |
344 | 380 | ||
345 | static void null_display_frame( vo_driver_t* self, vo_frame_t *frame_gen ){ | 381 | static void null_display_frame( vo_driver_t* self, vo_frame_t *frame_gen ){ |
346 | null_driver_t* this = (null_driver_t*) self; | 382 | null_driver_t* this = (null_driver_t*) self; |
347 | opie_frame_t* frame = (opie_frame_t*)frame_gen; | 383 | opie_frame_t* frame = (opie_frame_t*)frame_gen; |
348 | display_xine_frame_t display = this->frameDis; | 384 | display_xine_frame_t display = this->frameDis; |
349 | 385 | ||
350 | if (!this->m_show_video) | 386 | if (!this->m_show_video) |
351 | return; | 387 | return; |
352 | 388 | ||
353 | if( display != NULL ) { | 389 | if( display != NULL ) { |
354 | (*display)(this->caller, frame->data, | 390 | (*display)(this->caller, frame->data, |
355 | frame->sc.output_width, frame->sc.output_height, | 391 | frame->sc.output_width, frame->sc.output_height, |
356 | frame->bytes_per_line ); | 392 | frame->bytes_per_line ); |
357 | } | 393 | } |
358 | 394 | ||
359 | frame->frame.free(&frame->frame); | 395 | frame->frame.free(&frame->frame); |
360 | } | 396 | } |
361 | 397 | ||
362 | 398 | ||
363 | /* blending related */ | 399 | /* blending related */ |
364 | static void null_overlay_clut_yuv2rgb (null_driver_t *this, | 400 | static void null_overlay_clut_yuv2rgb (null_driver_t *this, |
365 | vo_overlay_t *overlay, | 401 | vo_overlay_t *overlay, |
366 | opie_frame_t *frame) { | 402 | opie_frame_t *frame) { |
403 | this = this; | ||
404 | |||
405 | |||
367 | int i; | 406 | int i; |
368 | clut_t* clut = (clut_t*) overlay->color; | 407 | clut_t* clut = (clut_t*) overlay->color; |
369 | if (!overlay->rgb_clut) { | 408 | if (!overlay->rgb_clut) { |
370 | for (i = 0; i < sizeof(overlay->color)/sizeof(overlay->color[0]); i++) { | 409 | for (i = 0; i < sizeof(overlay->color)/sizeof(overlay->color[0]); i++) { |
371 | *((uint32_t *)&clut[i]) = | 410 | *((uint32_t *)&clut[i]) = |
372 | frame->yuv2rgb->yuv2rgb_single_pixel_fun (frame->yuv2rgb, | 411 | frame->yuv2rgb-> |
373 | clut[i].y, clut[i].cb, | 412 | yuv2rgb_single_pixel_fun (frame->yuv2rgb, |
374 | clut[i].cr); | 413 | clut[i].y, clut[i].cb, |
414 | clut[i].cr); | ||
375 | } | 415 | } |
376 | overlay->rgb_clut++; | 416 | overlay->rgb_clut++; |
377 | } | 417 | } |
378 | if (!overlay->clip_rgb_clut) { | 418 | if (!overlay->clip_rgb_clut) { |
379 | clut = (clut_t*) overlay->clip_color; | 419 | clut = (clut_t*) overlay->clip_color; |
380 | for (i = 0; i < sizeof(overlay->color)/sizeof(overlay->color[0]); i++) { | 420 | for (i = 0; i < sizeof(overlay->color)/sizeof(overlay->color[0]); i++) { |
381 | *((uint32_t *)&clut[i]) = | 421 | *((uint32_t *)&clut[i]) = |
382 | frame->yuv2rgb->yuv2rgb_single_pixel_fun(frame->yuv2rgb, | 422 | frame->yuv2rgb->yuv2rgb_single_pixel_fun(frame->yuv2rgb, |
383 | clut[i].y, clut[i].cb, clut[i].cr); | 423 | clut[i].y, clut[i].cb, clut[i].cr); |
384 | } | 424 | } |
385 | overlay->clip_rgb_clut++; | 425 | overlay->clip_rgb_clut++; |
386 | } | 426 | } |
387 | } | 427 | } |
388 | 428 | ||
389 | static void null_overlay_blend ( vo_driver_t *this_gen, vo_frame_t *frame_gen, | 429 | static void null_overlay_blend ( vo_driver_t *this_gen, vo_frame_t *frame_gen, |
390 | vo_overlay_t *overlay) { | 430 | vo_overlay_t *overlay) { |
391 | null_driver_t *this = (null_driver_t *) this_gen; | 431 | null_driver_t *this = (null_driver_t *) this_gen; |
392 | opie_frame_t *frame = (opie_frame_t *) frame_gen; | 432 | opie_frame_t *frame = (opie_frame_t *) frame_gen; |
393 | 433 | ||
394 | if(!this->m_show_video || frame->sc.output_width == 0 | 434 | if(!this->m_show_video || frame->sc.output_width == 0 |
395 | || frame->sc.output_height== 0) | 435 | || frame->sc.output_height== 0) |
396 | return; | 436 | return; |
397 | 437 | ||
398 | /* Alpha Blend here */ | 438 | /* Alpha Blend here */ |
399 | if (overlay->rle) { | 439 | if (overlay->rle) { |
400 | if( !overlay->rgb_clut || !overlay->clip_rgb_clut) | 440 | if( !overlay->rgb_clut || !overlay->clip_rgb_clut) |
401 | null_overlay_clut_yuv2rgb(this,overlay,frame); | 441 | null_overlay_clut_yuv2rgb(this,overlay,frame); |
402 | 442 | ||
403 | switch(this->bpp) { | 443 | switch(this->bpp) { |
404 | case 16: | 444 | case 16: |
405 | blend_rgb16( (uint8_t *)frame->data, overlay, | 445 | blend_rgb16((uint8_t *)frame->data, |
406 | frame->sc.output_width, frame->sc.output_height, | 446 | overlay, |
407 | frame->sc.delivered_width, frame->sc.delivered_height); | 447 | frame->sc.output_width, |
408 | break; | 448 | frame->sc.output_height, |
409 | case 24: | 449 | frame->sc.delivered_width, |
410 | blend_rgb24( (uint8_t *)frame->data, overlay, | 450 | frame->sc.delivered_height); |
411 | frame->sc.output_width, frame->sc.output_height, | 451 | break; |
412 | frame->sc.delivered_width, frame->sc.delivered_height); | 452 | case 24: |
413 | break; | 453 | blend_rgb24((uint8_t *)frame->data, |
414 | case 32: | 454 | overlay, |
415 | blend_rgb32( (uint8_t *)frame->data, overlay, | 455 | frame->sc.output_width, |
416 | frame->sc.output_width, frame->sc.output_height, | 456 | frame->sc.output_height, |
417 | frame->sc.delivered_width, frame->sc.delivered_height); | 457 | frame->sc.delivered_width, |
418 | break; | 458 | frame->sc.delivered_height); |
419 | default: | 459 | break; |
420 | /* It should never get here */ | 460 | case 32: |
421 | break; | 461 | blend_rgb32((uint8_t *)frame->data, |
462 | overlay, | ||
463 | frame->sc.output_width, | ||
464 | frame->sc.output_height, | ||
465 | frame->sc.delivered_width, | ||
466 | frame->sc.delivered_height); | ||
467 | break; | ||
468 | default: | ||
469 | /* It should never get here */ | ||
470 | break; | ||
422 | } | 471 | } |
423 | } | 472 | } |
424 | } | 473 | } |
425 | 474 | ||
426 | 475 | ||
427 | static int null_get_property( vo_driver_t* self, | 476 | static int null_get_property( vo_driver_t* self, int property ){ |
428 | int property ){ | 477 | #if 0 |
478 | null_driver_t *this = (null_driver_t *)self; | ||
479 | |||
480 | switch(property) | ||
481 | { | ||
482 | case VO_PROP_ASPECT_RATIO: | ||
483 | return this->sc.user_ratio; | ||
484 | case VO_PROP_BRIGHTNESS: | ||
485 | return this->yuv2rgb_brightness; | ||
486 | case VO_PROP_WINDOW_WIDTH: | ||
487 | return this->sc.gui_width; | ||
488 | case VO_PROP_WINDOW_HEIGHT: | ||
489 | return this->sc.gui_height; | ||
490 | default: | ||
491 | break; | ||
492 | } | ||
493 | #else | ||
494 | property = property; | ||
495 | self = self; | ||
496 | #endif | ||
497 | |||
429 | return 0; | 498 | return 0; |
430 | } | 499 | } |
431 | static int null_set_property( vo_driver_t* self, | 500 | static int null_set_property( vo_driver_t* self, int property, |
432 | int property, | ||
433 | int value ){ | 501 | int value ){ |
502 | #if 0 | ||
503 | null_driver_t *this = (null_driver_t *)self; | ||
504 | |||
505 | switch(property) | ||
506 | { | ||
507 | case VO_PROP_ASPECT_RATIO: | ||
508 | if(value>=XINE_VO_ASPECT_NUM_RATIOS) | ||
509 | value = XINE_VO_ASPECT_AUTO; | ||
510 | this->sc.user_ratio = value; | ||
511 | break; | ||
512 | case VO_PROP_BRIGHTNESS: | ||
513 | this->yuv2rgb_brightness = value; | ||
514 | this->yuv2rgb_factory-> | ||
515 | set_csc_levels(this->yuv2rgb_factory, value, 128, 128); | ||
516 | break; | ||
517 | default: | ||
518 | break; | ||
519 | } | ||
520 | #else | ||
521 | self = self; | ||
522 | property = property; | ||
523 | #endif | ||
524 | |||
434 | return value; | 525 | return value; |
435 | } | 526 | } |
436 | static void null_get_property_min_max( vo_driver_t* self, | 527 | static void null_get_property_min_max( vo_driver_t* self, |
437 | int property, int *min, | 528 | int property, int *min, |
438 | int *max ){ | 529 | int *max ){ |
530 | self = self; | ||
531 | property = property; | ||
532 | |||
439 | *max = 0; | 533 | *max = 0; |
440 | *min = 0; | 534 | *min = 0; |
441 | } | 535 | } |
442 | static int null_gui_data_exchange( vo_driver_t* self, | 536 | static int null_gui_data_exchange( vo_driver_t* self, |
443 | int data_type, | 537 | int data_type, |
444 | void *data ){ | 538 | void *data ){ |
539 | self = self; | ||
540 | data_type = data_type; | ||
541 | data = data; | ||
542 | |||
445 | return 0; | 543 | return 0; |
446 | } | 544 | } |
447 | 545 | ||
448 | static void null_dispose ( vo_driver_t* self ){ | 546 | static void null_dispose ( vo_driver_t* self ){ |
449 | null_driver_t* this = (null_driver_t*)self; | 547 | null_driver_t* this = (null_driver_t*)self; |
450 | free ( this ); | 548 | free ( this ); |
451 | } | 549 | } |
452 | static int null_redraw_needed( vo_driver_t* self ){ | 550 | static int null_redraw_needed( vo_driver_t* self ){ |
453 | return 0; | 551 | self = self; |
552 | |||
553 | return 0; | ||
454 | } | 554 | } |
455 | 555 | ||
456 | 556 | ||
457 | xine_vo_driver_t* init_video_out_plugin( xine_t *xine, | 557 | xine_video_port_t* init_video_out_plugin( xine_t *xine, |
458 | void* video, | 558 | void* video, |
459 | display_xine_frame_t frameDisplayFunc, | 559 | display_xine_frame_t frameDisplayFunc, |
460 | void *userData ){ | 560 | void *userData ){ |
561 | video = video; | ||
562 | |||
563 | |||
461 | null_driver_t *vo; | 564 | null_driver_t *vo; |
462 | vo = (null_driver_t*)malloc( sizeof(null_driver_t ) ); | 565 | vo = (null_driver_t*)malloc( sizeof(null_driver_t ) ); |
463 | 566 | ||
464 | /* memset? */ | 567 | /* memset? */ |
465 | memset(vo,0, sizeof(null_driver_t ) ); | 568 | memset(vo,0, sizeof(null_driver_t ) ); |
466 | 569 | ||
467 | vo_scale_init (&vo->sc, 0, 0, xine->config); | 570 | _x_vo_scale_init (&vo->sc, 0, 0, xine->config); |
468 | 571 | ||
469 | vo->sc.gui_pixel_aspect = 1.0; | 572 | vo->sc.gui_pixel_aspect = 1.0; |
470 | 573 | ||
471 | vo->m_show_video = 0; // false | 574 | vo->m_show_video = 0; // false |
472 | vo->m_video_fullscreen = 0; | 575 | vo->m_video_fullscreen = 0; |
473 | vo->m_is_scaling = 0; | 576 | vo->m_is_scaling = 0; |
474 | vo->display_ratio = 1.0; | 577 | vo->display_ratio = 1.0; |
475 | vo->gui_width = 16; | 578 | vo->gui_width = 16; |
476 | vo->gui_height = 8; | 579 | vo->gui_height = 8; |
477 | vo->frameDis = NULL; | 580 | vo->frameDis = NULL; |
478 | 581 | ||
479 | /* install callback handlers*/ | 582 | /* install callback handlers*/ |
480 | vo->vo_driver.get_capabilities = null_get_capabilities; | 583 | vo->vo_driver.get_capabilities = null_get_capabilities; |
481 | vo->vo_driver.alloc_frame = null_alloc_frame; | 584 | vo->vo_driver.alloc_frame = null_alloc_frame; |
482 | vo->vo_driver.update_frame_format = null_update_frame_format; | 585 | vo->vo_driver.update_frame_format = null_update_frame_format; |
483 | vo->vo_driver.display_frame = null_display_frame; | 586 | vo->vo_driver.display_frame = null_display_frame; |
484 | vo->vo_driver.overlay_blend = null_overlay_blend; | 587 | vo->vo_driver.overlay_blend = null_overlay_blend; |
485 | vo->vo_driver.get_property = null_get_property; | 588 | vo->vo_driver.get_property = null_get_property; |
486 | vo->vo_driver.set_property = null_set_property; | 589 | vo->vo_driver.set_property = null_set_property; |
487 | vo->vo_driver.get_property_min_max = null_get_property_min_max; | 590 | vo->vo_driver.get_property_min_max = null_get_property_min_max; |
488 | vo->vo_driver.gui_data_exchange = null_gui_data_exchange; | 591 | vo->vo_driver.gui_data_exchange = null_gui_data_exchange; |
489 | vo->vo_driver.dispose = null_dispose; | 592 | vo->vo_driver.dispose = null_dispose; |
490 | vo->vo_driver.redraw_needed = null_redraw_needed; | 593 | vo->vo_driver.redraw_needed = null_redraw_needed; |
491 | 594 | ||
492 | 595 | ||
493 | /* capabilities */ | 596 | /* capabilities */ |
494 | vo->m_capabilities = /* VO_CAP_COPIES_IMAGE | */ VO_CAP_YUY2 | VO_CAP_YV12; | 597 | vo->m_capabilities = VO_CAP_YUY2 | VO_CAP_YV12; |
495 | vo->yuv2rgb_factory = yuv2rgb_factory_init (MODE_16_RGB, vo->yuv2rgb_swap, | 598 | vo->yuv2rgb_factory = yuv2rgb_factory_init (MODE_16_RGB, vo->yuv2rgb_swap, |
496 | vo->yuv2rgb_cmap); | 599 | vo->yuv2rgb_cmap); |
497 | 600 | ||
498 | vo->caller = userData; | 601 | vo->caller = userData; |
499 | vo->frameDis = frameDisplayFunc; | 602 | vo->frameDis = frameDisplayFunc; |
500 | 603 | ||
501 | /* return ( vo_driver_t*) vo; */ | 604 | return _x_vo_new_port(xine, &vo->vo_driver, 0); |
502 | return vo_new_port( xine, ( vo_driver_t* )vo, 0 ); | ||
503 | } | 605 | } |
504 | 606 | ||
505 | #if 0 | ||
506 | static vo_info_t vo_info_null = { | ||
507 | 5, | ||
508 | XINE_VISUAL_TYPE_FB | ||
509 | }; | ||
510 | |||
511 | vo_info_t *get_video_out_plugin_info(){ | ||
512 | vo_info_null.description = ("xine video output plugin using null device"); | ||
513 | return &vo_info_null; | ||
514 | } | ||
515 | |||
516 | #endif | ||
517 | 607 | ||
518 | /* this is special for this device */ | 608 | /* this is special for this device */ |
519 | /** | 609 | /** |
520 | * We know that we will be controled by the XINE LIB++ | 610 | * We know that we will be controled by the XINE LIB++ |
521 | */ | 611 | */ |
522 | 612 | ||
523 | /** | 613 | /** |
524 | * | 614 | * |
525 | */ | 615 | */ |
526 | int null_is_showing_video( xine_vo_driver_t* self ){ | 616 | int null_is_showing_video( xine_vo_driver_t* self ){ |
527 | null_driver_t* this = (null_driver_t*)self->driver; | 617 | null_driver_t* this = (null_driver_t*)self->driver; |
528 | return this->m_show_video; | 618 | return this->m_show_video; |
529 | } | 619 | } |
530 | void null_set_show_video( xine_vo_driver_t* self, int show ) { | 620 | void null_set_show_video( xine_vo_driver_t* self, int show ) { |
531 | ((null_driver_t*)self->driver)->m_show_video = show; | 621 | ((null_driver_t*)self->driver)->m_show_video = show; |
532 | } | 622 | } |
@@ -551,34 +641,33 @@ void null_set_scaling( xine_vo_driver_t* self, int scale ) {
551 | } | 641 | } |
552 | 642 | ||
553 | void null_set_gui_width( xine_vo_driver_t* self, int width ) { | 643 | void null_set_gui_width( xine_vo_driver_t* self, int width ) { |
554 | ((null_driver_t*)self->driver)->gui_width = width; | 644 | ((null_driver_t*)self->driver)->gui_width = width; |
555 | } | 645 | } |
556 | void null_set_gui_height( xine_vo_driver_t* self, int height ) { | 646 | void null_set_gui_height( xine_vo_driver_t* self, int height ) { |
557 | ((null_driver_t*)self->driver)->gui_height = height; | 647 | ((null_driver_t*)self->driver)->gui_height = height; |
558 | } | 648 | } |
559 | 649 | ||
560 | 650 | ||
561 | void null_set_mode( xine_vo_driver_t* self, int depth, int rgb ) { | 651 | void null_set_mode( xine_vo_driver_t* self, int depth, int rgb ) { |
562 | null_driver_t* this = (null_driver_t*)self->driver; | 652 | null_driver_t* this = (null_driver_t*)self->driver; |
563 | 653 | ||
564 | this->bytes_per_pixel = (depth + 7 ) / 8; | 654 | this->bytes_per_pixel = (depth + 7 ) / 8; |
565 | this->bpp = this->bytes_per_pixel * 8; | 655 | this->bpp = this->bytes_per_pixel * 8; |
566 | this->depth = depth; | 656 | this->depth = depth; |
567 | printf("depth %d %d\n", depth, this->bpp); | 657 | |
568 | printf("pixeltype %d\n", rgb ); | ||
569 | switch ( this->depth ) { | 658 | switch ( this->depth ) { |
570 | case 32: | 659 | case 32: |
571 | if( rgb == 0 ) | 660 | if( rgb == 0 ) |
572 | this->yuv2rgb_mode = MODE_32_RGB; | 661 | this->yuv2rgb_mode = MODE_32_RGB; |
573 | else | 662 | else |
574 | this->yuv2rgb_mode = MODE_32_BGR; | 663 | this->yuv2rgb_mode = MODE_32_BGR; |
575 | case 24: | 664 | case 24: |
576 | if( this->bpp == 32 ) { | 665 | if( this->bpp == 32 ) { |
577 | if( rgb == 0 ) { | 666 | if( rgb == 0 ) { |
578 | this->yuv2rgb_mode = MODE_32_RGB; | 667 | this->yuv2rgb_mode = MODE_32_RGB; |
579 | } else { | 668 | } else { |
580 | this->yuv2rgb_mode = MODE_32_BGR; | 669 | this->yuv2rgb_mode = MODE_32_BGR; |
581 | } | 670 | } |
582 | }else{ | 671 | }else{ |
583 | if( rgb == 0 ) | 672 | if( rgb == 0 ) |
584 | this->yuv2rgb_mode = MODE_24_RGB; | 673 | this->yuv2rgb_mode = MODE_24_RGB; |
@@ -615,28 +704,32 @@ void null_set_mode( xine_vo_driver_t* self, int depth, int rgb ) {
615 | 704 | ||
616 | void null_display_handler( xine_vo_driver_t* self, display_xine_frame_t t, | 705 | void null_display_handler( xine_vo_driver_t* self, display_xine_frame_t t, |
617 | void* user_data ) { | 706 | void* user_data ) { |
618 | null_driver_t* this = (null_driver_t*) self->driver; | 707 | null_driver_t* this = (null_driver_t*) self->driver; |
619 | this->caller = user_data; | 708 | this->caller = user_data; |
620 | this->frameDis = t; | 709 | this->frameDis = t; |
621 | } | 710 | } |
622 | 711 | ||
623 | void null_preload_decoders( xine_stream_t *stream ) | 712 | void null_preload_decoders( xine_stream_t *stream ) |
624 | { | 713 | { |
625 | static const uint32_t preloadedAudioDecoders[] = { BUF_AUDIO_MPEG, BUF_AUDIO_VORBIS }; | 714 | static const uint32_t preloadedAudioDecoders[] = { BUF_AUDIO_MPEG, BUF_AUDIO_VORBIS }; |
626 | static const uint8_t preloadedAudioDecoderCount = sizeof( preloadedAudioDecoders ) / sizeof( preloadedAudioDecoders[ 0 ] ); | 715 | static const uint8_t preloadedAudioDecoderCount = sizeof( preloadedAudioDecoders ) / sizeof( preloadedAudioDecoders[ 0 ] ); |
627 | static const uint32_t preloadedVideoDecoders[] = { BUF_VIDEO_MPEG, BUF_VIDEO_MPEG4, BUF_VIDEO_DIVX5 }; | 716 | static const uint32_t preloadedVideoDecoders[] = { BUF_VIDEO_MPEG, BUF_VIDEO_MPEG4, BUF_VIDEO_DIVX5 }; |
628 | static const uint8_t preloadedVideoDecoderCount = sizeof( preloadedVideoDecoders ) / sizeof( preloadedVideoDecoders[ 0 ] ); | 717 | static const uint8_t preloadedVideoDecoderCount = sizeof( preloadedVideoDecoders ) / sizeof( preloadedVideoDecoders[ 0 ] ); |
629 | 718 | ||
630 | uint8_t i; | 719 | uint8_t i; |
720 | #if 0 | ||
631 | 721 | ||
632 | for ( i = 0; i < preloadedAudioDecoderCount; ++i ) { | 722 | for ( i = 0; i < preloadedAudioDecoderCount; ++i ) { |
633 | audio_decoder_t *decoder = get_audio_decoder( stream, ( preloadedAudioDecoders[ i ] >> 16 ) & 0xff ); | 723 | audio_decoder_t *decoder = get_audio_decoder( stream, ( preloadedAudioDecoders[ i ] >> 16 ) & 0xff ); |
724 | decoder = decoder; | ||
634 | /* free_audio_decoder( stream, decoder ); */ | 725 | /* free_audio_decoder( stream, decoder ); */ |
635 | } | 726 | } |
636 | 727 | ||
637 | for ( i = 0; i < preloadedVideoDecoderCount; ++i ) { | 728 | for ( i = 0; i < preloadedVideoDecoderCount; ++i ) { |
638 | video_decoder_t *decoder = get_video_decoder( stream, ( preloadedVideoDecoders[ i ] >> 16 ) & 0xff ); | 729 | video_decoder_t *decoder = get_video_decoder( stream, ( preloadedVideoDecoders[ i ] >> 16 ) & 0xff ); |
730 | decoder = decoder; | ||
639 | /* free_video_decoder( stream, decoder ); */ | 731 | /* free_video_decoder( stream, decoder ); */ |
640 | } | 732 | } |
733 | #endif | ||
641 | } | 734 | } |
642 | 735 | ||
diff --git a/noncore/multimedia/opieplayer2/opieplayer2.pro b/noncore/multimedia/opieplayer2/opieplayer2.pro
index b9a8d6d..8166658 100644
--- a/noncore/multimedia/opieplayer2/opieplayer2.pro
+++ b/noncore/multimedia/opieplayer2/opieplayer2.pro
@@ -2,28 +2,28 @@ CONFIG = qt warn_on quick-app
2 | HEADERS = playlistselection.h mediaplayerstate.h xinecontrol.h \ | 2 | HEADERS = playlistselection.h mediaplayerstate.h xinecontrol.h \ |
3 | videowidget.h audiowidget.h playlistwidget.h om3u.h mediaplayer.h inputDialog.h \ | 3 | videowidget.h audiowidget.h playlistwidget.h om3u.h mediaplayer.h inputDialog.h \ |
4 | frame.h lib.h xinevideowidget.h volumecontrol.h playlistwidgetgui.h\ | 4 | frame.h lib.h xinevideowidget.h volumecontrol.h playlistwidgetgui.h\ |
5 | alphablend.h yuv2rgb.h threadutil.h mediawidget.h playlistview.h playlistfileview.h \ | 5 | alphablend.h yuv2rgb.h threadutil.h mediawidget.h playlistview.h playlistfileview.h \ |
6 | skin.h | 6 | skin.h |
7 | SOURCES = main.cpp \ | 7 | SOURCES = main.cpp \ |
8 | playlistselection.cpp mediaplayerstate.cpp xinecontrol.cpp \ | 8 | playlistselection.cpp mediaplayerstate.cpp xinecontrol.cpp \ |
9 | videowidget.cpp audiowidget.cpp playlistwidget.cpp om3u.cpp mediaplayer.cpp inputDialog.cpp \ | 9 | videowidget.cpp audiowidget.cpp playlistwidget.cpp om3u.cpp mediaplayer.cpp inputDialog.cpp \ |
10 | frame.cpp lib.cpp nullvideo.c xinevideowidget.cpp volumecontrol.cpp \ | 10 | frame.cpp lib.cpp nullvideo.c xinevideowidget.cpp volumecontrol.cpp \ |
11 | playlistwidgetgui.cpp\ | 11 | playlistwidgetgui.cpp\ |
12 | alphablend.c yuv2rgb.c yuv2rgb_arm2.c yuv2rgb_arm4l.S \ | 12 | alphablend.c yuv2rgb.c yuv2rgb_arm2.c yuv2rgb_arm4l.S \ |
13 | threadutil.cpp mediawidget.cpp playlistview.cpp playlistfileview.cpp \ | 13 | threadutil.cpp mediawidget.cpp playlistview.cpp playlistfileview.cpp \ |
14 | skin.cpp | 14 | skin.cpp |
15 | TARGET = opieplayer2 | 15 | TARGET = opieplayer2 |
16 | INCLUDEPATH += $(OPIEDIR)/include | 16 | INCLUDEPATH += $(OPIEDIR)/include |
17 | DEPENDPATH += $(OPIEDIR)/include | 17 | DEPENDPATH += $(OPIEDIR)/include |
18 | LIBS += -lqpe -lpthread -lopiecore2 -lopieui2 -lxine -lstdc++ | 18 | LIBS += -lqpe -lpthread -lopiecore2 -lopieui2 -lqtaux2 -lxine |
19 | MOC_DIR = qpeobj | 19 | MOC_DIR = qpeobj |
20 | OBJECTS_DIR = qpeobj | 20 | OBJECTS_DIR = qpeobj |
21 | 21 | ||
22 | include ( $(OPIEDIR)/include.pro ) | 22 | include ( $(OPIEDIR)/include.pro ) |
23 | 23 | ||
24 | !isEmpty( LIBXINE_INC_DIR ) { | 24 | !isEmpty( LIBXINE_INC_DIR ) { |
25 | INCLUDEPATH = $$LIBXINE_INC_DIR $$INCLUDEPATH | 25 | INCLUDEPATH = $$LIBXINE_INC_DIR $$INCLUDEPATH |
26 | } | 26 | } |
27 | !isEmpty( LIBXINE_LIB_DIR ) { | 27 | !isEmpty( LIBXINE_LIB_DIR ) { |
28 | LIBS = -L$$LIBXINE_LIB_DIR $$LIBS | 28 | LIBS = -L$$LIBXINE_LIB_DIR $$LIBS |
29 | } | 29 | } |
diff --git a/noncore/multimedia/opieplayer2/xinevideowidget.cpp b/noncore/multimedia/opieplayer2/xinevideowidget.cpp
index 15c611f..1ac9277 100644
--- a/noncore/multimedia/opieplayer2/xinevideowidget.cpp
+++ b/noncore/multimedia/opieplayer2/xinevideowidget.cpp
@@ -38,32 +38,33 @@
38 | #include <qapplication.h> | 38 | #include <qapplication.h> |
39 | 39 | ||
40 | #include <qpe/resource.h> | 40 | #include <qpe/resource.h> |
41 | 41 | ||
42 | #include "xinevideowidget.h" | 42 | #include "xinevideowidget.h" |
43 | 43 | ||
44 | 44 | ||
45 | // 0 deg rot: copy a line from src to dst (use libc memcpy) | 45 | // 0 deg rot: copy a line from src to dst (use libc memcpy) |
46 | 46 | ||
47 | // 180 deg rot: copy a line from src to dst reversed | 47 | // 180 deg rot: copy a line from src to dst reversed |
48 | 48 | ||
49 | /* | 49 | /* |
50 | * This code relies the len be a multiply of 16bit | 50 | * This code relies the len be a multiply of 16bit |
51 | */ | 51 | */ |
52 | static inline void memcpy_rev ( void *_dst, void *_src, size_t len ) | 52 | static inline void memcpy_rev ( void *_dst, void *_src, size_t len ) |
53 | { | 53 | { |
54 | |||
54 | /* | 55 | /* |
55 | * move the source to the end | 56 | * move the source to the end |
56 | */ | 57 | */ |
57 | char *src_c = static_cast<char*>(_src) + len; | 58 | char *src_c = static_cast<char*>(_src) + len; |
58 | 59 | ||
59 | /* | 60 | /* |
60 | * as we copy by 16bit and not 8bit | 61 | * as we copy by 16bit and not 8bit |
61 | * devide the length by two | 62 | * devide the length by two |
62 | */ | 63 | */ |
63 | len >>= 1; | 64 | len >>= 1; |
64 | 65 | ||
65 | short int* dst = static_cast<short int*>( _dst ); | 66 | short int* dst = static_cast<short int*>( _dst ); |
66 | short int* src = reinterpret_cast<short int*>( src_c ); | 67 | short int* src = reinterpret_cast<short int*>( src_c ); |
67 | 68 | ||
68 | /* | 69 | /* |
69 | * Increment dst after assigning | 70 | * Increment dst after assigning |