 noncore/multimedia/opieplayer2/lib.cpp       |  5 +++++
 noncore/multimedia/opieplayer2/nullvideo.c   | 21 +++++++++++++++++++++
 2 files changed, 26 insertions(+), 0 deletions(-)
diff --git a/noncore/multimedia/opieplayer2/lib.cpp b/noncore/multimedia/opieplayer2/lib.cpp
index 8896cfe..d04af08 100644
--- a/noncore/multimedia/opieplayer2/lib.cpp
+++ b/noncore/multimedia/opieplayer2/lib.cpp
@@ -66,6 +66,8 @@ extern "C" {
   void null_set_mode( const xine_vo_driver_t* self, int depth, int rgb );
   void null_set_videoGamma( const xine_vo_driver_t* self , int value );
   void null_display_handler( const xine_vo_driver_t* self, display_xine_frame_t t, void* user_data );
+
+  void null_preload_decoders( xine_stream_t *stream );
 }
 
 using namespace XINE;
@@ -136,6 +138,9 @@ void Lib::initialize()
     m_queue = xine_event_new_queue (m_stream);
 
     xine_event_create_listener_thread (m_queue, xine_event_handler, this);
+
+    ::null_preload_decoders( m_stream );
+
     m_duringInitialization = false;
 }
 
diff --git a/noncore/multimedia/opieplayer2/nullvideo.c b/noncore/multimedia/opieplayer2/nullvideo.c index c988854..e2eb663 100644 --- a/noncore/multimedia/opieplayer2/nullvideo.c +++ b/noncore/multimedia/opieplayer2/nullvideo.c | |||
@@ -1,614 +1,635 @@ | |||
1 | /* | 1 | /* |
2 | This file is part of the Opie Project | 2 | This file is part of the Opie Project |
3 | 3 | ||
4 | Copyright (c) 2002 Max Reiss <harlekin@handhelds.org> | 4 | Copyright (c) 2002 Max Reiss <harlekin@handhelds.org> |
5 | Copyright (c) 2002 LJP <> | 5 | Copyright (c) 2002 LJP <> |
6 | Copyright (c) 2002 Holger Freyther <zecke@handhelds.org> | 6 | Copyright (c) 2002 Holger Freyther <zecke@handhelds.org> |
7 | =. | 7 | =. |
8 | .=l. | 8 | .=l. |
9 | .>+-= | 9 | .>+-= |
10 | _;:, .> :=|. This program is free software; you can | 10 | _;:, .> :=|. This program is free software; you can |
11 | .> <`_, > . <= redistribute it and/or modify it under | 11 | .> <`_, > . <= redistribute it and/or modify it under |
12 | :`=1 )Y*s>-.-- : the terms of the GNU General Public | 12 | :`=1 )Y*s>-.-- : the terms of the GNU General Public |
13 | .="- .-=="i, .._ License as published by the Free Software | 13 | .="- .-=="i, .._ License as published by the Free Software |
14 | - . .-<_> .<> Foundation; either version 2 of the License, | 14 | - . .-<_> .<> Foundation; either version 2 of the License, |
15 | ._= =} : or (at your option) any later version. | 15 | ._= =} : or (at your option) any later version. |
16 | .%`+i> _;_. | 16 | .%`+i> _;_. |
17 | .i_,=:_. -<s. This program is distributed in the hope that | 17 | .i_,=:_. -<s. This program is distributed in the hope that |
18 | + . -:. = it will be useful, but WITHOUT ANY WARRANTY; | 18 | + . -:. = it will be useful, but WITHOUT ANY WARRANTY; |
19 | : .. .:, . . . without even the implied warranty of | 19 | : .. .:, . . . without even the implied warranty of |
20 | =_ + =;=|` MERCHANTABILITY or FITNESS FOR A | 20 | =_ + =;=|` MERCHANTABILITY or FITNESS FOR A |
21 | _.=:. : :=>`: PARTICULAR PURPOSE. See the GNU | 21 | _.=:. : :=>`: PARTICULAR PURPOSE. See the GNU |
22 | ..}^=.= = ; Library General Public License for more | 22 | ..}^=.= = ; Library General Public License for more |
23 | ++= -. .` .: details. | 23 | ++= -. .` .: details. |
24 | : = ...= . :.=- | 24 | : = ...= . :.=- |
25 | -. .:....=;==+<; You should have received a copy of the GNU | 25 | -. .:....=;==+<; You should have received a copy of the GNU |
26 | -_. . . )=. = Library General Public License along with | 26 | -_. . . )=. = Library General Public License along with |
27 | -- :-=` this library; see the file COPYING.LIB. | 27 | -- :-=` this library; see the file COPYING.LIB. |
28 | If not, write to the Free Software Foundation, | 28 | If not, write to the Free Software Foundation, |
29 | Inc., 59 Temple Place - Suite 330, | 29 | Inc., 59 Temple Place - Suite 330, |
30 | Boston, MA 02111-1307, USA. | 30 | Boston, MA 02111-1307, USA. |
31 | 31 | ||
32 | */ | 32 | */ |
33 | 33 | ||
34 | #include <stdlib.h> | 34 | #include <stdlib.h> |
35 | #include <stdio.h> | 35 | #include <stdio.h> |
36 | 36 | ||
37 | #include <math.h> | 37 | #include <math.h> |
38 | 38 | ||
39 | #include <xine.h> | 39 | #include <xine.h> |
40 | #include <xine/video_out.h> | 40 | #include <xine/video_out.h> |
41 | #include <xine/xine_internal.h> | 41 | #include <xine/xine_internal.h> |
42 | #include <xine/xineutils.h> | 42 | #include <xine/xineutils.h> |
43 | #include <xine/vo_scale.h> | 43 | #include <xine/vo_scale.h> |
44 | #include <xine/buffer.h> | ||
44 | 45 | ||
45 | #include <pthread.h> | 46 | #include <pthread.h> |
46 | #include "alphablend.h" | 47 | #include "alphablend.h" |
47 | #include "yuv2rgb.h" | 48 | #include "yuv2rgb.h" |
48 | 49 | ||
49 | #define printf(x,...) | 50 | #define printf(x,...) |
50 | 51 | ||
51 | /* | 52 | /* |
52 | #define LOG | 53 | #define LOG |
53 | */ | 54 | */ |
54 | 55 | ||
55 | /* the caller for our event draw handler */ | 56 | /* the caller for our event draw handler */ |
56 | typedef void (*display_xine_frame_t) (void *user_data, uint8_t* frame, | 57 | typedef void (*display_xine_frame_t) (void *user_data, uint8_t* frame, |
57 | int width, int height,int bytes ); | 58 | int width, int height,int bytes ); |
58 | 59 | ||
59 | typedef struct null_driver_s null_driver_t; | 60 | typedef struct null_driver_s null_driver_t; |
60 | 61 | ||
61 | struct null_driver_s { | 62 | struct null_driver_s { |
62 | vo_driver_t vo_driver; | 63 | vo_driver_t vo_driver; |
63 | 64 | ||
64 | uint32_t m_capabilities; | 65 | uint32_t m_capabilities; |
65 | int m_show_video; | 66 | int m_show_video; |
66 | int m_video_fullscreen; | 67 | int m_video_fullscreen; |
67 | int m_is_scaling; | 68 | int m_is_scaling; |
68 | 69 | ||
69 | int depth, bpp, bytes_per_pixel; | 70 | int depth, bpp, bytes_per_pixel; |
70 | int yuv2rgb_mode; | 71 | int yuv2rgb_mode; |
71 | int yuv2rgb_swap; | 72 | int yuv2rgb_swap; |
72 | int yuv2rgb_gamma; | 73 | int yuv2rgb_gamma; |
73 | uint8_t *yuv2rgb_cmap; | 74 | uint8_t *yuv2rgb_cmap; |
74 | yuv2rgb_factory_t *yuv2rgb_factory; | 75 | yuv2rgb_factory_t *yuv2rgb_factory; |
75 | 76 | ||
76 | vo_overlay_t *overlay; | 77 | vo_overlay_t *overlay; |
77 | vo_scale_t sc; | 78 | vo_scale_t sc; |
78 | 79 | ||
79 | int gui_width; | 80 | int gui_width; |
80 | int gui_height; | 81 | int gui_height; |
81 | int gui_changed; | 82 | int gui_changed; |
82 | 83 | ||
83 | double display_ratio; | 84 | double display_ratio; |
84 | void* caller; | 85 | void* caller; |
85 | display_xine_frame_t frameDis; | 86 | display_xine_frame_t frameDis; |
86 | }; | 87 | }; |
87 | 88 | ||
88 | typedef struct opie_frame_s opie_frame_t; | 89 | typedef struct opie_frame_s opie_frame_t; |
89 | struct opie_frame_s { | 90 | struct opie_frame_s { |
90 | vo_frame_t frame; | 91 | vo_frame_t frame; |
91 | 92 | ||
92 | int format; | 93 | int format; |
93 | int flags; | 94 | int flags; |
94 | 95 | ||
95 | vo_scale_t sc; | 96 | vo_scale_t sc; |
96 | 97 | ||
97 | uint8_t *chunk[3]; | 98 | uint8_t *chunk[3]; |
98 | 99 | ||
99 | uint8_t *data; /* rgb */ | 100 | uint8_t *data; /* rgb */ |
100 | int bytes_per_line; | 101 | int bytes_per_line; |
101 | 102 | ||
102 | yuv2rgb_t *yuv2rgb; | 103 | yuv2rgb_t *yuv2rgb; |
103 | uint8_t *rgb_dst; | 104 | uint8_t *rgb_dst; |
104 | int yuv_stride; | 105 | int yuv_stride; |
105 | int stripe_height, stripe_inc; | 106 | int stripe_height, stripe_inc; |
106 | 107 | ||
107 | null_driver_t *output; | 108 | null_driver_t *output; |
108 | }; | 109 | }; |
109 | 110 | ||
110 | static uint32_t null_get_capabilities( vo_driver_t *self ){ | 111 | static uint32_t null_get_capabilities( vo_driver_t *self ){ |
111 | null_driver_t* this = (null_driver_t*)self; | 112 | null_driver_t* this = (null_driver_t*)self; |
112 | return this->m_capabilities; | 113 | return this->m_capabilities; |
113 | } | 114 | } |
114 | 115 | ||
115 | static void null_frame_copy (vo_frame_t *vo_img, uint8_t **src) { | 116 | static void null_frame_copy (vo_frame_t *vo_img, uint8_t **src) { |
116 | opie_frame_t *frame = (opie_frame_t *) vo_img ; | 117 | opie_frame_t *frame = (opie_frame_t *) vo_img ; |
117 | 118 | ||
118 | if (!frame->output->m_show_video) { | 119 | if (!frame->output->m_show_video) { |
119 | /* printf("nullvideo: no video\n"); */ | 120 | /* printf("nullvideo: no video\n"); */ |
120 | return; | 121 | return; |
121 | } | 122 | } |
122 | 123 | ||
123 | if (frame->format == XINE_IMGFMT_YV12) { | 124 | if (frame->format == XINE_IMGFMT_YV12) { |
124 | frame->yuv2rgb->yuv2rgb_fun (frame->yuv2rgb, frame->rgb_dst, | 125 | frame->yuv2rgb->yuv2rgb_fun (frame->yuv2rgb, frame->rgb_dst, |
125 | src[0], src[1], src[2]); | 126 | src[0], src[1], src[2]); |
126 | } else { | 127 | } else { |
127 | 128 | ||
128 | frame->yuv2rgb->yuy22rgb_fun (frame->yuv2rgb, frame->rgb_dst, | 129 | frame->yuv2rgb->yuy22rgb_fun (frame->yuv2rgb, frame->rgb_dst, |
129 | src[0]); | 130 | src[0]); |
130 | } | 131 | } |
131 | 132 | ||
132 | frame->rgb_dst += frame->stripe_inc; | 133 | frame->rgb_dst += frame->stripe_inc; |
133 | } | 134 | } |
134 | 135 | ||
135 | static void null_frame_field (vo_frame_t *vo_img, int which_field) { | 136 | static void null_frame_field (vo_frame_t *vo_img, int which_field) { |
136 | 137 | ||
137 | opie_frame_t *frame = (opie_frame_t *) vo_img ; | 138 | opie_frame_t *frame = (opie_frame_t *) vo_img ; |
138 | 139 | ||
139 | switch (which_field) { | 140 | switch (which_field) { |
140 | case VO_TOP_FIELD: | 141 | case VO_TOP_FIELD: |
141 | frame->rgb_dst = (uint8_t *)frame->data; | 142 | frame->rgb_dst = (uint8_t *)frame->data; |
142 | frame->stripe_inc = 2*frame->stripe_height * frame->bytes_per_line; | 143 | frame->stripe_inc = 2*frame->stripe_height * frame->bytes_per_line; |
143 | break; | 144 | break; |
144 | case VO_BOTTOM_FIELD: | 145 | case VO_BOTTOM_FIELD: |
145 | frame->rgb_dst = (uint8_t *)frame->data + frame->bytes_per_line ; | 146 | frame->rgb_dst = (uint8_t *)frame->data + frame->bytes_per_line ; |
146 | frame->stripe_inc = 2*frame->stripe_height * frame->bytes_per_line; | 147 | frame->stripe_inc = 2*frame->stripe_height * frame->bytes_per_line; |
147 | break; | 148 | break; |
148 | case VO_BOTH_FIELDS: | 149 | case VO_BOTH_FIELDS: |
149 | frame->rgb_dst = (uint8_t *)frame->data; | 150 | frame->rgb_dst = (uint8_t *)frame->data; |
150 | break; | 151 | break; |
151 | } | 152 | } |
152 | } | 153 | } |
153 | 154 | ||
154 | 155 | ||
155 | /* take care of the frame*/ | 156 | /* take care of the frame*/ |
156 | static void null_frame_dispose( vo_frame_t* vo_img){ | 157 | static void null_frame_dispose( vo_frame_t* vo_img){ |
157 | opie_frame_t* frame = (opie_frame_t*)vo_img; | 158 | opie_frame_t* frame = (opie_frame_t*)vo_img; |
158 | 159 | ||
159 | if (frame->data) | 160 | if (frame->data) |
160 | free( frame->data ); | 161 | free( frame->data ); |
161 | free (frame); | 162 | free (frame); |
162 | } | 163 | } |
163 | 164 | ||
164 | /* end take care of frames*/ | 165 | /* end take care of frames*/ |
165 | 166 | ||
166 | static vo_frame_t* null_alloc_frame( vo_driver_t* self ){ | 167 | static vo_frame_t* null_alloc_frame( vo_driver_t* self ){ |
167 | 168 | ||
168 | null_driver_t* this = (null_driver_t*)self; | 169 | null_driver_t* this = (null_driver_t*)self; |
169 | opie_frame_t* frame; | 170 | opie_frame_t* frame; |
170 | 171 | ||
171 | #ifdef LOG | 172 | #ifdef LOG |
172 | fprintf (stderr, "nullvideo: alloc_frame\n"); | 173 | fprintf (stderr, "nullvideo: alloc_frame\n"); |
173 | #endif | 174 | #endif |
174 | 175 | ||
175 | frame = (opie_frame_t*)malloc ( sizeof(opie_frame_t) ); | 176 | frame = (opie_frame_t*)malloc ( sizeof(opie_frame_t) ); |
176 | 177 | ||
177 | memset( frame, 0, sizeof( opie_frame_t) ); | 178 | memset( frame, 0, sizeof( opie_frame_t) ); |
178 | memcpy (&frame->sc, &this->sc, sizeof(vo_scale_t)); | 179 | memcpy (&frame->sc, &this->sc, sizeof(vo_scale_t)); |
179 | 180 | ||
180 | pthread_mutex_init (&frame->frame.mutex, NULL); | 181 | pthread_mutex_init (&frame->frame.mutex, NULL); |
181 | 182 | ||
182 | frame->output = this; | 183 | frame->output = this; |
183 | 184 | ||
184 | /* initialize the frame*/ | 185 | /* initialize the frame*/ |
185 | frame->frame.driver = self; | 186 | frame->frame.driver = self; |
186 | frame->frame.copy = null_frame_copy; | 187 | frame->frame.copy = null_frame_copy; |
187 | frame->frame.field = null_frame_field; | 188 | frame->frame.field = null_frame_field; |
188 | frame->frame.dispose = null_frame_dispose; | 189 | frame->frame.dispose = null_frame_dispose; |
189 | 190 | ||
190 | /* | 191 | /* |
191 | * colorspace converter for this frame | 192 | * colorspace converter for this frame |
192 | */ | 193 | */ |
193 | frame->yuv2rgb = this->yuv2rgb_factory->create_converter (this->yuv2rgb_factory); | 194 | frame->yuv2rgb = this->yuv2rgb_factory->create_converter (this->yuv2rgb_factory); |
194 | 195 | ||
195 | return (vo_frame_t*) frame; | 196 | return (vo_frame_t*) frame; |
196 | } | 197 | } |
197 | 198 | ||
198 | static void null_update_frame_format( vo_driver_t* self, vo_frame_t* img, | 199 | static void null_update_frame_format( vo_driver_t* self, vo_frame_t* img, |
199 | uint32_t width, uint32_t height, | 200 | uint32_t width, uint32_t height, |
200 | int ratio_code, int format, int flags ){ | 201 | int ratio_code, int format, int flags ){ |
201 | null_driver_t* this = (null_driver_t*) self; | 202 | null_driver_t* this = (null_driver_t*) self; |
202 | opie_frame_t* frame = (opie_frame_t*)img; | 203 | opie_frame_t* frame = (opie_frame_t*)img; |
203 | /* not needed now */ | 204 | /* not needed now */ |
204 | 205 | ||
205 | #ifdef LOG | 206 | #ifdef LOG |
206 | fprintf (stderr, "nullvideo: update_frame_format\n"); | 207 | fprintf (stderr, "nullvideo: update_frame_format\n"); |
207 | #endif | 208 | #endif |
208 | 209 | ||
209 | flags &= VO_BOTH_FIELDS; | 210 | flags &= VO_BOTH_FIELDS; |
210 | 211 | ||
211 | /* find out if we need to adapt this frame */ | 212 | /* find out if we need to adapt this frame */ |
212 | 213 | ||
213 | if ((width != frame->sc.delivered_width) | 214 | if ((width != frame->sc.delivered_width) |
214 | || (height != frame->sc.delivered_height) | 215 | || (height != frame->sc.delivered_height) |
215 | || (ratio_code != frame->sc.delivered_ratio_code) | 216 | || (ratio_code != frame->sc.delivered_ratio_code) |
216 | || (flags != frame->flags) | 217 | || (flags != frame->flags) |
217 | || (format != frame->format) | 218 | || (format != frame->format) |
218 | || (this->sc.user_ratio != frame->sc.user_ratio) | 219 | || (this->sc.user_ratio != frame->sc.user_ratio) |
219 | || (this->gui_width != frame->sc.gui_width) | 220 | || (this->gui_width != frame->sc.gui_width) |
220 | || (this->gui_height != frame->sc.gui_height)) { | 221 | || (this->gui_height != frame->sc.gui_height)) { |
221 | 222 | ||
222 | frame->sc.delivered_width = width; | 223 | frame->sc.delivered_width = width; |
223 | frame->sc.delivered_height = height; | 224 | frame->sc.delivered_height = height; |
224 | frame->sc.delivered_ratio_code = ratio_code; | 225 | frame->sc.delivered_ratio_code = ratio_code; |
225 | frame->flags = flags; | 226 | frame->flags = flags; |
226 | frame->format = format; | 227 | frame->format = format; |
227 | frame->sc.user_ratio = this->sc.user_ratio; | 228 | frame->sc.user_ratio = this->sc.user_ratio; |
228 | frame->sc.gui_width = this->gui_width; | 229 | frame->sc.gui_width = this->gui_width; |
229 | frame->sc.gui_height = this->gui_height; | 230 | frame->sc.gui_height = this->gui_height; |
230 | frame->sc.gui_pixel_aspect = 1.0; | 231 | frame->sc.gui_pixel_aspect = 1.0; |
231 | 232 | ||
232 | vo_scale_compute_ideal_size ( &frame->sc ); | 233 | vo_scale_compute_ideal_size ( &frame->sc ); |
233 | vo_scale_compute_output_size( &frame->sc ); | 234 | vo_scale_compute_output_size( &frame->sc ); |
234 | 235 | ||
235 | #ifdef LOG | 236 | #ifdef LOG |
236 | fprintf (stderr, "nullvideo: gui %dx%d delivered %dx%d output %dx%d\n", | 237 | fprintf (stderr, "nullvideo: gui %dx%d delivered %dx%d output %dx%d\n", |
237 | frame->sc.gui_width, frame->sc.gui_height, | 238 | frame->sc.gui_width, frame->sc.gui_height, |
238 | frame->sc.delivered_width, frame->sc.delivered_height, | 239 | frame->sc.delivered_width, frame->sc.delivered_height, |
239 | frame->sc.output_width, frame->sc.output_height); | 240 | frame->sc.output_width, frame->sc.output_height); |
240 | #endif | 241 | #endif |
241 | 242 | ||
242 | /* | 243 | /* |
243 | * (re-) allocate | 244 | * (re-) allocate |
244 | */ | 245 | */ |
245 | if( frame->data ) { | 246 | if( frame->data ) { |
246 | if( frame->chunk[0] ){ | 247 | if( frame->chunk[0] ){ |
247 | free( frame->chunk[0] ); | 248 | free( frame->chunk[0] ); |
248 | frame->chunk[0] = NULL; | 249 | frame->chunk[0] = NULL; |
249 | } | 250 | } |
250 | if( frame->chunk[1] ){ | 251 | if( frame->chunk[1] ){ |
251 | free ( frame->chunk[1] ); | 252 | free ( frame->chunk[1] ); |
252 | frame->chunk[1] = NULL; | 253 | frame->chunk[1] = NULL; |
253 | } | 254 | } |
254 | if( frame->chunk[2] ){ | 255 | if( frame->chunk[2] ){ |
255 | free ( frame->chunk[2] ); | 256 | free ( frame->chunk[2] ); |
256 | frame->chunk[2] = NULL; | 257 | frame->chunk[2] = NULL; |
257 | } | 258 | } |
258 | free ( frame->data ); | 259 | free ( frame->data ); |
259 | } | 260 | } |
260 | 261 | ||
261 | frame->data = xine_xmalloc (frame->sc.output_width | 262 | frame->data = xine_xmalloc (frame->sc.output_width |
262 | * frame->sc.output_height | 263 | * frame->sc.output_height |
263 | * this->bytes_per_pixel ); | 264 | * this->bytes_per_pixel ); |
264 | 265 | ||
265 | if( format == XINE_IMGFMT_YV12 ) { | 266 | if( format == XINE_IMGFMT_YV12 ) { |
266 | frame->frame.pitches[0] = 8*((width + 7) / 8); | 267 | frame->frame.pitches[0] = 8*((width + 7) / 8); |
267 | frame->frame.pitches[1] = 8*((width + 15) / 16); | 268 | frame->frame.pitches[1] = 8*((width + 15) / 16); |
268 | frame->frame.pitches[2] = 8*((width + 15) / 16); | 269 | frame->frame.pitches[2] = 8*((width + 15) / 16); |
269 | frame->frame.base[0] = xine_xmalloc_aligned (16, frame->frame.pitches[0] * height,(void **)&frame->chunk[0]); | 270 | frame->frame.base[0] = xine_xmalloc_aligned (16, frame->frame.pitches[0] * height,(void **)&frame->chunk[0]); |
270 | frame->frame.base[1] = xine_xmalloc_aligned (16, frame->frame.pitches[1] * ((height+ 1)/2), (void **)&frame->chunk[1]); | 271 | frame->frame.base[1] = xine_xmalloc_aligned (16, frame->frame.pitches[1] * ((height+ 1)/2), (void **)&frame->chunk[1]); |
271 | frame->frame.base[2] = xine_xmalloc_aligned (16, frame->frame.pitches[2] * ((height+ 1)/2), (void **)&frame->chunk[2]); | 272 | frame->frame.base[2] = xine_xmalloc_aligned (16, frame->frame.pitches[2] * ((height+ 1)/2), (void **)&frame->chunk[2]); |
272 | 273 | ||
273 | }else{ | 274 | }else{ |
274 | frame->frame.pitches[0] = 8*((width + 3) / 4); | 275 | frame->frame.pitches[0] = 8*((width + 3) / 4); |
275 | 276 | ||
276 | frame->frame.base[0] = xine_xmalloc_aligned (16, frame->frame.pitches[0] * height, | 277 | frame->frame.base[0] = xine_xmalloc_aligned (16, frame->frame.pitches[0] * height, |
277 | (void **)&frame->chunk[0]); | 278 | (void **)&frame->chunk[0]); |
278 | frame->chunk[1] = NULL; | 279 | frame->chunk[1] = NULL; |
279 | frame->chunk[2] = NULL; | 280 | frame->chunk[2] = NULL; |
280 | } | 281 | } |
281 | 282 | ||
282 | frame->stripe_height = 16 * frame->sc.output_height / frame->sc.delivered_height; | 283 | frame->stripe_height = 16 * frame->sc.output_height / frame->sc.delivered_height; |
283 | frame->bytes_per_line = frame->sc.output_width * this->bytes_per_pixel; | 284 | frame->bytes_per_line = frame->sc.output_width * this->bytes_per_pixel; |
284 | 285 | ||
285 | /* | 286 | /* |
286 | * set up colorspace converter | 287 | * set up colorspace converter |
287 | */ | 288 | */ |
288 | 289 | ||
289 | switch (flags) { | 290 | switch (flags) { |
290 | case VO_TOP_FIELD: | 291 | case VO_TOP_FIELD: |
291 | case VO_BOTTOM_FIELD: | 292 | case VO_BOTTOM_FIELD: |
292 | frame->yuv2rgb->configure (frame->yuv2rgb, | 293 | frame->yuv2rgb->configure (frame->yuv2rgb, |
293 | frame->sc.delivered_width, | 294 | frame->sc.delivered_width, |
294 | 16, | 295 | 16, |
295 | 2*frame->frame.pitches[0], | 296 | 2*frame->frame.pitches[0], |
296 | 2*frame->frame.pitches[1], | 297 | 2*frame->frame.pitches[1], |
297 | frame->sc.output_width, | 298 | frame->sc.output_width, |
298 | frame->stripe_height, | 299 | frame->stripe_height, |
299 | frame->bytes_per_line*2); | 300 | frame->bytes_per_line*2); |
300 | frame->yuv_stride = frame->bytes_per_line*2; | 301 | frame->yuv_stride = frame->bytes_per_line*2; |
301 | break; | 302 | break; |
302 | case VO_BOTH_FIELDS: | 303 | case VO_BOTH_FIELDS: |
303 | frame->yuv2rgb->configure (frame->yuv2rgb, | 304 | frame->yuv2rgb->configure (frame->yuv2rgb, |
304 | frame->sc.delivered_width, | 305 | frame->sc.delivered_width, |
305 | 16, | 306 | 16, |
306 | frame->frame.pitches[0], | 307 | frame->frame.pitches[0], |
307 | frame->frame.pitches[1], | 308 | frame->frame.pitches[1], |
308 | frame->sc.output_width, | 309 | frame->sc.output_width, |
309 | frame->stripe_height, | 310 | frame->stripe_height, |
310 | frame->bytes_per_line); | 311 | frame->bytes_per_line); |
311 | frame->yuv_stride = frame->bytes_per_line; | 312 | frame->yuv_stride = frame->bytes_per_line; |
312 | break; | 313 | break; |
313 | } | 314 | } |
314 | #ifdef LOG | 315 | #ifdef LOG |
315 | fprintf (stderr, "nullvideo: colorspace converter configured.\n"); | 316 | fprintf (stderr, "nullvideo: colorspace converter configured.\n"); |
316 | #endif | 317 | #endif |
317 | } | 318 | } |
318 | 319 | ||
319 | /* | 320 | /* |
320 | * reset dest pointers | 321 | * reset dest pointers |
321 | */ | 322 | */ |
322 | 323 | ||
323 | if (frame->data) { | 324 | if (frame->data) { |
324 | switch (flags) { | 325 | switch (flags) { |
325 | case VO_TOP_FIELD: | 326 | case VO_TOP_FIELD: |
326 | frame->rgb_dst = (uint8_t *)frame->data; | 327 | frame->rgb_dst = (uint8_t *)frame->data; |
327 | frame->stripe_inc = 2 * frame->stripe_height * frame->bytes_per_line; | 328 | frame->stripe_inc = 2 * frame->stripe_height * frame->bytes_per_line; |
328 | break; | 329 | break; |
329 | case VO_BOTTOM_FIELD: | 330 | case VO_BOTTOM_FIELD: |
330 | frame->rgb_dst = (uint8_t *)frame->data + frame->bytes_per_line ; | 331 | frame->rgb_dst = (uint8_t *)frame->data + frame->bytes_per_line ; |
331 | frame->stripe_inc = 2 * frame->stripe_height * frame->bytes_per_line; | 332 | frame->stripe_inc = 2 * frame->stripe_height * frame->bytes_per_line; |
332 | break; | 333 | break; |
333 | case VO_BOTH_FIELDS: | 334 | case VO_BOTH_FIELDS: |
334 | frame->rgb_dst = (uint8_t *)frame->data; | 335 | frame->rgb_dst = (uint8_t *)frame->data; |
335 | frame->stripe_inc = frame->stripe_height * frame->bytes_per_line; | 336 | frame->stripe_inc = frame->stripe_height * frame->bytes_per_line; |
336 | break; | 337 | break; |
337 | } | 338 | } |
338 | } | 339 | } |
339 | } | 340 | } |
340 | 341 | ||
341 | static void null_display_frame( vo_driver_t* self, vo_frame_t *frame_gen ){ | 342 | static void null_display_frame( vo_driver_t* self, vo_frame_t *frame_gen ){ |
342 | null_driver_t* this = (null_driver_t*) self; | 343 | null_driver_t* this = (null_driver_t*) self; |
343 | opie_frame_t* frame = (opie_frame_t*)frame_gen; | 344 | opie_frame_t* frame = (opie_frame_t*)frame_gen; |
344 | display_xine_frame_t display = this->frameDis; | 345 | display_xine_frame_t display = this->frameDis; |
345 | 346 | ||
346 | if (!this->m_show_video) | 347 | if (!this->m_show_video) |
347 | return; | 348 | return; |
348 | 349 | ||
349 | if( display != NULL ) { | 350 | if( display != NULL ) { |
350 | (*display)(this->caller, frame->data, | 351 | (*display)(this->caller, frame->data, |
351 | frame->sc.output_width, frame->sc.output_height, | 352 | frame->sc.output_width, frame->sc.output_height, |
352 | frame->bytes_per_line ); | 353 | frame->bytes_per_line ); |
353 | } | 354 | } |
354 | 355 | ||
355 | frame->frame.displayed (&frame->frame); | 356 | frame->frame.displayed (&frame->frame); |
356 | } | 357 | } |
357 | 358 | ||
358 | 359 | ||
359 | /* blending related */ | 360 | /* blending related */ |
360 | static void null_overlay_clut_yuv2rgb (null_driver_t *this, | 361 | static void null_overlay_clut_yuv2rgb (null_driver_t *this, |
361 | vo_overlay_t *overlay, | 362 | vo_overlay_t *overlay, |
362 | opie_frame_t *frame) { | 363 | opie_frame_t *frame) { |
363 | int i; | 364 | int i; |
364 | clut_t* clut = (clut_t*) overlay->color; | 365 | clut_t* clut = (clut_t*) overlay->color; |
365 | if (!overlay->rgb_clut) { | 366 | if (!overlay->rgb_clut) { |
366 | for (i = 0; i < sizeof(overlay->color)/sizeof(overlay->color[0]); i++) { | 367 | for (i = 0; i < sizeof(overlay->color)/sizeof(overlay->color[0]); i++) { |
367 | *((uint32_t *)&clut[i]) = | 368 | *((uint32_t *)&clut[i]) = |
368 | frame->yuv2rgb->yuv2rgb_single_pixel_fun (frame->yuv2rgb, | 369 | frame->yuv2rgb->yuv2rgb_single_pixel_fun (frame->yuv2rgb, |
369 | clut[i].y, clut[i].cb, clut[i].cr); | 370 | clut[i].y, clut[i].cb, clut[i].cr); |
370 | } | 371 | } |
371 | overlay->rgb_clut++; | 372 | overlay->rgb_clut++; |
372 | } | 373 | } |
373 | if (!overlay->clip_rgb_clut) { | 374 | if (!overlay->clip_rgb_clut) { |
374 | clut = (clut_t*) overlay->clip_color; | 375 | clut = (clut_t*) overlay->clip_color; |
375 | for (i = 0; i < sizeof(overlay->color)/sizeof(overlay->color[0]); i++) { | 376 | for (i = 0; i < sizeof(overlay->color)/sizeof(overlay->color[0]); i++) { |
376 | *((uint32_t *)&clut[i]) = | 377 | *((uint32_t *)&clut[i]) = |
377 | frame->yuv2rgb->yuv2rgb_single_pixel_fun(frame->yuv2rgb, | 378 | frame->yuv2rgb->yuv2rgb_single_pixel_fun(frame->yuv2rgb, |
378 | clut[i].y, clut[i].cb, clut[i].cr); | 379 | clut[i].y, clut[i].cb, clut[i].cr); |
379 | } | 380 | } |
380 | overlay->clip_rgb_clut++; | 381 | overlay->clip_rgb_clut++; |
381 | } | 382 | } |
382 | } | 383 | } |
383 | 384 | ||
384 | static void null_overlay_blend ( vo_driver_t *this_gen, vo_frame_t *frame_gen, vo_overlay_t *overlay) { | 385 | static void null_overlay_blend ( vo_driver_t *this_gen, vo_frame_t *frame_gen, vo_overlay_t *overlay) { |
385 | null_driver_t *this = (null_driver_t *) this_gen; | 386 | null_driver_t *this = (null_driver_t *) this_gen; |
386 | opie_frame_t *frame = (opie_frame_t *) frame_gen; | 387 | opie_frame_t *frame = (opie_frame_t *) frame_gen; |
387 | 388 | ||
388 | if(!this->m_show_video || frame->sc.output_width == 0 | 389 | if(!this->m_show_video || frame->sc.output_width == 0 |
389 | || frame->sc.output_height== 0) | 390 | || frame->sc.output_height== 0) |
390 | return; | 391 | return; |
391 | 392 | ||
392 | /* Alpha Blend here */ | 393 | /* Alpha Blend here */ |
393 | if (overlay->rle) { | 394 | if (overlay->rle) { |
394 | if( !overlay->rgb_clut || !overlay->clip_rgb_clut) | 395 | if( !overlay->rgb_clut || !overlay->clip_rgb_clut) |
395 | null_overlay_clut_yuv2rgb(this,overlay,frame); | 396 | null_overlay_clut_yuv2rgb(this,overlay,frame); |
396 | 397 | ||
397 | switch(this->bpp) { | 398 | switch(this->bpp) { |
398 | case 16: | 399 | case 16: |
399 | blend_rgb16( (uint8_t *)frame->data, overlay, | 400 | blend_rgb16( (uint8_t *)frame->data, overlay, |
400 | frame->sc.output_width, frame->sc.output_height, | 401 | frame->sc.output_width, frame->sc.output_height, |
401 | frame->sc.delivered_width, frame->sc.delivered_height); | 402 | frame->sc.delivered_width, frame->sc.delivered_height); |
402 | break; | 403 | break; |
403 | case 24: | 404 | case 24: |
404 | blend_rgb24( (uint8_t *)frame->data, overlay, | 405 | blend_rgb24( (uint8_t *)frame->data, overlay, |
405 | frame->sc.output_width, frame->sc.output_height, | 406 | frame->sc.output_width, frame->sc.output_height, |
406 | frame->sc.delivered_width, frame->sc.delivered_height); | 407 | frame->sc.delivered_width, frame->sc.delivered_height); |
407 | break; | 408 | break; |
408 | case 32: | 409 | case 32: |
409 | blend_rgb32( (uint8_t *)frame->data, overlay, | 410 | blend_rgb32( (uint8_t *)frame->data, overlay, |
410 | frame->sc.output_width, frame->sc.output_height, | 411 | frame->sc.output_width, frame->sc.output_height, |
411 | frame->sc.delivered_width, frame->sc.delivered_height); | 412 | frame->sc.delivered_width, frame->sc.delivered_height); |
412 | break; | 413 | break; |
413 | default: | 414 | default: |
414 | /* It should never get here */ | 415 | /* It should never get here */ |
415 | break; | 416 | break; |
416 | } | 417 | } |
417 | } | 418 | } |
418 | } | 419 | } |
419 | 420 | ||
420 | 421 | ||
421 | static int null_get_property( vo_driver_t* self, | 422 | static int null_get_property( vo_driver_t* self, |
422 | int property ){ | 423 | int property ){ |
423 | return 0; | 424 | return 0; |
424 | } | 425 | } |
425 | static int null_set_property( vo_driver_t* self, | 426 | static int null_set_property( vo_driver_t* self, |
426 | int property, | 427 | int property, |
427 | int value ){ | 428 | int value ){ |
428 | return value; | 429 | return value; |
429 | } | 430 | } |
430 | static void null_get_property_min_max( vo_driver_t* self, | 431 | static void null_get_property_min_max( vo_driver_t* self, |
431 | int property, int *min, | 432 | int property, int *min, |
432 | int *max ){ | 433 | int *max ){ |
433 | *max = 0; | 434 | *max = 0; |
434 | *min = 0; | 435 | *min = 0; |
435 | } | 436 | } |
436 | static int null_gui_data_exchange( vo_driver_t* self, | 437 | static int null_gui_data_exchange( vo_driver_t* self, |
437 | int data_type, | 438 | int data_type, |
438 | void *data ){ | 439 | void *data ){ |
439 | return 0; | 440 | return 0; |
440 | } | 441 | } |
441 | 442 | ||
442 | static void null_dispose ( vo_driver_t* self ){ | 443 | static void null_dispose ( vo_driver_t* self ){ |
443 | null_driver_t* this = (null_driver_t*)self; | 444 | null_driver_t* this = (null_driver_t*)self; |
444 | free ( this ); | 445 | free ( this ); |
445 | } | 446 | } |
446 | static int null_redraw_needed( vo_driver_t* self ){ | 447 | static int null_redraw_needed( vo_driver_t* self ){ |
447 | return 0; | 448 | return 0; |
448 | } | 449 | } |
449 | 450 | ||
450 | 451 | ||
451 | xine_vo_driver_t* init_video_out_plugin( xine_t *xine, | 452 | xine_vo_driver_t* init_video_out_plugin( xine_t *xine, |
452 | void* video, display_xine_frame_t frameDisplayFunc, void *userData ){ | 453 | void* video, display_xine_frame_t frameDisplayFunc, void *userData ){ |
453 | null_driver_t *vo; | 454 | null_driver_t *vo; |
454 | vo = (null_driver_t*)malloc( sizeof(null_driver_t ) ); | 455 | vo = (null_driver_t*)malloc( sizeof(null_driver_t ) ); |
455 | 456 | ||
456 | /* memset? */ | 457 | /* memset? */ |
457 | memset(vo,0, sizeof(null_driver_t ) ); | 458 | memset(vo,0, sizeof(null_driver_t ) ); |
458 | 459 | ||
459 | vo_scale_init (&vo->sc, 0, 0, xine->config); | 460 | vo_scale_init (&vo->sc, 0, 0, xine->config); |
460 | 461 | ||
461 | vo->sc.gui_pixel_aspect = 1.0; | 462 | vo->sc.gui_pixel_aspect = 1.0; |
462 | 463 | ||
463 | vo->m_show_video = 0; // false | 464 | vo->m_show_video = 0; // false |
464 | vo->m_video_fullscreen = 0; | 465 | vo->m_video_fullscreen = 0; |
465 | vo->m_is_scaling = 0; | 466 | vo->m_is_scaling = 0; |
466 | vo->display_ratio = 1.0; | 467 | vo->display_ratio = 1.0; |
467 | vo->gui_width = 16; | 468 | vo->gui_width = 16; |
468 | vo->gui_height = 8; | 469 | vo->gui_height = 8; |
469 | vo->frameDis = NULL; | 470 | vo->frameDis = NULL; |
470 | 471 | ||
471 | /* install callback handlers*/ | 472 | /* install callback handlers*/ |
472 | vo->vo_driver.get_capabilities = null_get_capabilities; | 473 | vo->vo_driver.get_capabilities = null_get_capabilities; |
473 | vo->vo_driver.alloc_frame = null_alloc_frame; | 474 | vo->vo_driver.alloc_frame = null_alloc_frame; |
474 | vo->vo_driver.update_frame_format = null_update_frame_format; | 475 | vo->vo_driver.update_frame_format = null_update_frame_format; |
475 | vo->vo_driver.display_frame = null_display_frame; | 476 | vo->vo_driver.display_frame = null_display_frame; |
476 | vo->vo_driver.overlay_blend = null_overlay_blend; | 477 | vo->vo_driver.overlay_blend = null_overlay_blend; |
477 | vo->vo_driver.get_property = null_get_property; | 478 | vo->vo_driver.get_property = null_get_property; |
478 | vo->vo_driver.set_property = null_set_property; | 479 | vo->vo_driver.set_property = null_set_property; |
479 | vo->vo_driver.get_property_min_max = null_get_property_min_max; | 480 | vo->vo_driver.get_property_min_max = null_get_property_min_max; |
480 | vo->vo_driver.gui_data_exchange = null_gui_data_exchange; | 481 | vo->vo_driver.gui_data_exchange = null_gui_data_exchange; |
481 | vo->vo_driver.dispose = null_dispose; | 482 | vo->vo_driver.dispose = null_dispose; |
482 | vo->vo_driver.redraw_needed = null_redraw_needed; | 483 | vo->vo_driver.redraw_needed = null_redraw_needed; |
483 | 484 | ||
484 | 485 | ||
485 | /* capabilities */ | 486 | /* capabilities */ |
486 | vo->m_capabilities = VO_CAP_COPIES_IMAGE | VO_CAP_YUY2 | VO_CAP_YV12; | 487 | vo->m_capabilities = VO_CAP_COPIES_IMAGE | VO_CAP_YUY2 | VO_CAP_YV12; |
487 | vo->yuv2rgb_factory = yuv2rgb_factory_init (MODE_16_RGB, vo->yuv2rgb_swap, | 488 | vo->yuv2rgb_factory = yuv2rgb_factory_init (MODE_16_RGB, vo->yuv2rgb_swap, |
488 | vo->yuv2rgb_cmap); | 489 | vo->yuv2rgb_cmap); |
489 | 490 | ||
490 | vo->caller = userData; | 491 | vo->caller = userData; |
491 | vo->frameDis = frameDisplayFunc; | 492 | vo->frameDis = frameDisplayFunc; |
492 | 493 | ||
493 | /* return ( vo_driver_t*) vo; */ | 494 | /* return ( vo_driver_t*) vo; */ |
494 | return vo_new_port( xine, ( vo_driver_t* )vo ); | 495 | return vo_new_port( xine, ( vo_driver_t* )vo ); |
495 | } | 496 | } |
496 | 497 | ||
497 | #if 0 | 498 | #if 0 |
498 | static vo_info_t vo_info_null = { | 499 | static vo_info_t vo_info_null = { |
499 | 5, | 500 | 5, |
500 | XINE_VISUAL_TYPE_FB | 501 | XINE_VISUAL_TYPE_FB |
501 | }; | 502 | }; |
502 | 503 | ||
503 | vo_info_t *get_video_out_plugin_info(){ | 504 | vo_info_t *get_video_out_plugin_info(){ |
504 | vo_info_null.description = ("xine video output plugin using null device"); | 505 | vo_info_null.description = ("xine video output plugin using null device"); |
505 | return &vo_info_null; | 506 | return &vo_info_null; |
506 | } | 507 | } |
507 | 508 | ||
508 | #endif | 509 | #endif |
509 | 510 | ||
510 | /* the functions below are specific to this null output device */ | 511 | /* the functions below are specific to this null output device */ |
511 | /** | 512 | /** |
512 |  * We know that we will be controlled by the XINE LIB++ | 513 |  * We know that we will be controlled by the XINE LIB++ |
513 | */ | 514 | */ |
514 | 515 | ||
515 | /** | 516 | /** |
516 |  * Accessors used by the C++ side (lib.cpp) to query and control the null video driver | 517 |  * Accessors used by the C++ side (lib.cpp) to query and control the null video driver |
517 | */ | 518 | */ |
518 | int null_is_showing_video( xine_vo_driver_t* self ){ | 519 | int null_is_showing_video( xine_vo_driver_t* self ){ |
519 | null_driver_t* this = (null_driver_t*)self->driver; | 520 | null_driver_t* this = (null_driver_t*)self->driver; |
520 | return this->m_show_video; | 521 | return this->m_show_video; |
521 | } | 522 | } |
522 | void null_set_show_video( xine_vo_driver_t* self, int show ) { | 523 | void null_set_show_video( xine_vo_driver_t* self, int show ) { |
523 | ((null_driver_t*)self->driver)->m_show_video = show; | 524 | ((null_driver_t*)self->driver)->m_show_video = show; |
524 | } | 525 | } |
525 | 526 | ||
526 | int null_is_fullscreen( xine_vo_driver_t* self ){ | 527 | int null_is_fullscreen( xine_vo_driver_t* self ){ |
527 | return ((null_driver_t*)self->driver)->m_video_fullscreen; | 528 | return ((null_driver_t*)self->driver)->m_video_fullscreen; |
528 | } | 529 | } |
529 | void null_set_fullscreen( xine_vo_driver_t* self, int screen ){ | 530 | void null_set_fullscreen( xine_vo_driver_t* self, int screen ){ |
530 | ((null_driver_t*)self->driver)->m_video_fullscreen = screen; | 531 | ((null_driver_t*)self->driver)->m_video_fullscreen = screen; |
531 | } | 532 | } |
532 | int null_is_scaling( xine_vo_driver_t* self ){ | 533 | int null_is_scaling( xine_vo_driver_t* self ){ |
533 | return ((null_driver_t*)self->driver)->m_is_scaling; | 534 | return ((null_driver_t*)self->driver)->m_is_scaling; |
534 | } | 535 | } |
535 | 536 | ||
536 | void null_set_videoGamma( xine_vo_driver_t* self , int value ) { | 537 | void null_set_videoGamma( xine_vo_driver_t* self , int value ) { |
537 | ((null_driver_t*)self->driver)->yuv2rgb_gamma = value; | 538 | ((null_driver_t*)self->driver)->yuv2rgb_gamma = value; |
538 | ((null_driver_t*)self->driver)->yuv2rgb_factory->set_gamma( ((null_driver_t*)self->driver)->yuv2rgb_factory, value ); | 539 | ((null_driver_t*)self->driver)->yuv2rgb_factory->set_gamma( ((null_driver_t*)self->driver)->yuv2rgb_factory, value ); |
539 | } | 540 | } |
540 | 541 | ||
541 | void null_set_scaling( xine_vo_driver_t* self, int scale ) { | 542 | void null_set_scaling( xine_vo_driver_t* self, int scale ) { |
542 | ((null_driver_t*)self->driver)->m_is_scaling = scale; | 543 | ((null_driver_t*)self->driver)->m_is_scaling = scale; |
543 | } | 544 | } |
544 | 545 | ||
545 | void null_set_gui_width( xine_vo_driver_t* self, int width ) { | 546 | void null_set_gui_width( xine_vo_driver_t* self, int width ) { |
546 | ((null_driver_t*)self->driver)->gui_width = width; | 547 | ((null_driver_t*)self->driver)->gui_width = width; |
547 | } | 548 | } |
548 | void null_set_gui_height( xine_vo_driver_t* self, int height ) { | 549 | void null_set_gui_height( xine_vo_driver_t* self, int height ) { |
549 | ((null_driver_t*)self->driver)->gui_height = height; | 550 | ((null_driver_t*)self->driver)->gui_height = height; |
550 | } | 551 | } |
551 | 552 | ||
552 | 553 | ||
553 | void null_set_mode( xine_vo_driver_t* self, int depth, int rgb ) { | 554 | void null_set_mode( xine_vo_driver_t* self, int depth, int rgb ) { |
554 | null_driver_t* this = (null_driver_t*)self->driver; | 555 | null_driver_t* this = (null_driver_t*)self->driver; |
555 | 556 | ||
556 | this->bytes_per_pixel = (depth + 7 ) / 8; | 557 | this->bytes_per_pixel = (depth + 7 ) / 8; |
557 | this->bpp = this->bytes_per_pixel * 8; | 558 | this->bpp = this->bytes_per_pixel * 8; |
558 | this->depth = depth; | 559 | this->depth = depth; |
559 | printf("depth %d %d\n", depth, this->bpp); | 560 | printf("depth %d %d\n", depth, this->bpp); |
560 | printf("pixeltype %d\n", rgb ); | 561 | printf("pixeltype %d\n", rgb ); |
561 | switch ( this->depth ) { | 562 | switch ( this->depth ) { |
562 | case 32: | 563 | case 32: |
563 | if( rgb == 0 ) | 564 | if( rgb == 0 ) |
564 | this->yuv2rgb_mode = MODE_32_RGB; | 565 | this->yuv2rgb_mode = MODE_32_RGB; |
565 | else | 566 | else |
566 |    this->yuv2rgb_mode = MODE_32_BGR; /* no break: falls through to case 24, which re-selects the same 32-bit mode */ | 567 |    this->yuv2rgb_mode = MODE_32_BGR; /* no break: falls through to case 24, which re-selects the same 32-bit mode */ |
567 | case 24: | 568 | case 24: |
568 | if( this->bpp == 32 ) { | 569 | if( this->bpp == 32 ) { |
569 | if( rgb == 0 ) { | 570 | if( rgb == 0 ) { |
570 | this->yuv2rgb_mode = MODE_32_RGB; | 571 | this->yuv2rgb_mode = MODE_32_RGB; |
571 | } else { | 572 | } else { |
572 | this->yuv2rgb_mode = MODE_32_BGR; | 573 | this->yuv2rgb_mode = MODE_32_BGR; |
573 | } | 574 | } |
574 |   } else { | 575 |   } else { |
575 | if( rgb == 0 ) | 576 | if( rgb == 0 ) |
576 | this->yuv2rgb_mode = MODE_24_RGB; | 577 | this->yuv2rgb_mode = MODE_24_RGB; |
577 | else | 578 | else |
578 | this->yuv2rgb_mode = MODE_24_BGR; | 579 | this->yuv2rgb_mode = MODE_24_BGR; |
579 |   } | 580 |   } |
580 | break; | 581 | break; |
581 | case 16: | 582 | case 16: |
582 | if( rgb == 0 ) { | 583 | if( rgb == 0 ) { |
583 | this->yuv2rgb_mode = MODE_16_RGB; | 584 | this->yuv2rgb_mode = MODE_16_RGB; |
584 | } else { | 585 | } else { |
585 | this->yuv2rgb_mode = MODE_16_BGR; | 586 | this->yuv2rgb_mode = MODE_16_BGR; |
586 | } | 587 | } |
587 | break; | 588 | break; |
588 | case 15: | 589 | case 15: |
589 | if( rgb == 0 ) { | 590 | if( rgb == 0 ) { |
590 | this->yuv2rgb_mode = MODE_15_RGB; | 591 | this->yuv2rgb_mode = MODE_15_RGB; |
591 | } else { | 592 | } else { |
592 | this->yuv2rgb_mode = MODE_15_BGR; | 593 | this->yuv2rgb_mode = MODE_15_BGR; |
593 | } | 594 | } |
594 | break; | 595 | break; |
595 | case 8: | 596 | case 8: |
596 | if( rgb == 0 ) { | 597 | if( rgb == 0 ) { |
597 | this->yuv2rgb_mode = MODE_8_RGB; | 598 | this->yuv2rgb_mode = MODE_8_RGB; |
598 | } else { | 599 | } else { |
599 | this->yuv2rgb_mode = MODE_8_BGR; | 600 | this->yuv2rgb_mode = MODE_8_BGR; |
600 | } | 601 | } |
601 | break; | 602 | break; |
602 |  } | 603 |  } |
603 | //free(this->yuv2rgb_factory ); | 604 | //free(this->yuv2rgb_factory ); |
604 | // this->yuv2rgb_factory = yuv2rgb_factory_init (this->yuv2rgb_mode, this->yuv2rgb_swap, | 605 | // this->yuv2rgb_factory = yuv2rgb_factory_init (this->yuv2rgb_mode, this->yuv2rgb_swap, |
605 | // this->yuv2rgb_cmap); | 606 | // this->yuv2rgb_cmap); |
606 | } | 607 | } |
607 | 608 | ||
608 | void null_display_handler( xine_vo_driver_t* self, display_xine_frame_t t, | 609 | void null_display_handler( xine_vo_driver_t* self, display_xine_frame_t t, |
609 | void* user_data ) { | 610 | void* user_data ) { |
610 | null_driver_t* this = (null_driver_t*) self->driver; | 611 | null_driver_t* this = (null_driver_t*) self->driver; |
611 | this->caller = user_data; | 612 | this->caller = user_data; |
612 | this->frameDis = t; | 613 | this->frameDis = t; |
613 | } | 614 | } |
614 | 615 | ||
616 | void null_preload_decoders( xine_stream_t *stream ) | ||
617 | { | ||
618 | static const uint32_t preloadedAudioDecoders[] = { BUF_AUDIO_MPEG, BUF_AUDIO_VORBIS }; | ||
619 | static const uint8_t preloadedAudioDecoderCount = sizeof( preloadedAudioDecoders ) / sizeof( preloadedAudioDecoders[ 0 ] ); | ||
620 | static const uint32_t preloadedVideoDecoders[] = { BUF_VIDEO_MPEG, BUF_VIDEO_MPEG4, BUF_VIDEO_DIVX5 }; | ||
621 | static const uint8_t preloadedVideoDecoderCount = sizeof( preloadedVideoDecoders ) / sizeof( preloadedVideoDecoders[ 0 ] ); | ||
622 | |||
623 | uint8_t i; | ||
624 | |||
625 | for ( i = 0; i < preloadedAudioDecoderCount; ++i ) { | ||
626 | audio_decoder_t *decoder = get_audio_decoder( stream, ( preloadedAudioDecoders[ i ] >> 16 ) & 0xff ); | ||
627 | free_audio_decoder( stream, decoder ); | ||
628 | } | ||
629 | |||
630 | for ( i = 0; i < preloadedVideoDecoderCount; ++i ) { | ||
631 | video_decoder_t *decoder = get_video_decoder( stream, ( preloadedVideoDecoders[ i ] >> 16 ) & 0xff ); | ||
632 | free_video_decoder( stream, decoder ); | ||
633 | } | ||
634 | } | ||
635 | |||
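
For context on how this driver is meant to be driven from the C++ side, here is a minimal caller sketch. It is an illustration under stated assumptions, not code from this commit: the "nullvideo.h" header name, the frame-callback signature, the helper name setup_null_video and the QVGA geometry are assumed, while init_video_out_plugin() and the null_* accessors are the entry points shown in nullvideo.c above.

/* Hypothetical caller sketch -- the callback signature, header name and
 * geometry are assumptions; only init_video_out_plugin() and the null_*
 * accessors come from nullvideo.c itself. */
#include <stdint.h>
#include "nullvideo.h"   /* assumed header declaring the driver entry points */

/* assumed shape of display_xine_frame_t: receives each rendered RGB frame */
static void my_frame_handler( void *user_data, uint8_t *frame,
                              int width, int height, int bytes )
{
    /* blit 'frame' into the widget referenced by user_data */
}

static xine_vo_driver_t *setup_null_video( xine_t *xine, void *widget )
{
    /* create the null output port and register the frame callback */
    xine_vo_driver_t *port =
        init_video_out_plugin( xine, NULL, my_frame_handler, widget );

    /* configure it through the accessors defined above */
    null_set_mode( port, 16, 0 );      /* 16 bpp, RGB byte order (MODE_16_RGB) */
    null_set_gui_width( port, 240 );   /* assumed QVGA geometry */
    null_set_gui_height( port, 320 );
    null_set_show_video( port, 1 );

    return port;
}

As for the newly added null_preload_decoders(): it requests and immediately releases decoders for the common MPEG/Vorbis audio and MPEG/MPEG-4/DivX5 video buffer types, which looks intended to make xine resolve those decoder plugins once at startup rather than on first playback; the commit itself does not state that rationale.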