 noncore/multimedia/opieplayer2/nullvideo.c | 183
 1 file changed, 94 insertions(+), 89 deletions(-)

diff --git a/noncore/multimedia/opieplayer2/nullvideo.c b/noncore/multimedia/opieplayer2/nullvideo.c
index 90f62f2..378bbd4 100644
--- a/noncore/multimedia/opieplayer2/nullvideo.c
+++ b/noncore/multimedia/opieplayer2/nullvideo.c
@@ -95,121 +95,122 @@ struct opie_frame_s {

  vo_scale_t sc;

  uint8_t *chunk[3];

  uint8_t *data;  /* rgb */
  int bytes_per_line;

  yuv2rgb_t *yuv2rgb;
  uint8_t *rgb_dst;
  int yuv_stride;
  int stripe_height, stripe_inc;

  null_driver_t *output;
};

static uint32_t null_get_capabilities( vo_driver_t *self ){
  null_driver_t* this = (null_driver_t*)self;
  return this->m_capabilities;
}

static void null_frame_copy (vo_frame_t *vo_img, uint8_t **src) {
  opie_frame_t *frame = (opie_frame_t *) vo_img ;

  vo_img->proc_called = 1;

  if (!frame->output->m_show_video) {
    /* printf("nullvideo: no video\n"); */
    return;
  }

  if (frame->format == XINE_IMGFMT_YV12) {
    frame->yuv2rgb->yuv2rgb_fun (frame->yuv2rgb, frame->rgb_dst,
                                 src[0], src[1], src[2]);
  } else {

    frame->yuv2rgb->yuy22rgb_fun (frame->yuv2rgb, frame->rgb_dst,
                                  src[0]);
  }

  frame->rgb_dst += frame->stripe_inc;
}

static void null_frame_field (vo_frame_t *vo_img, int which_field) {

  opie_frame_t *frame = (opie_frame_t *) vo_img ;

  switch (which_field) {
  case VO_TOP_FIELD:
    frame->rgb_dst = (uint8_t *)frame->data;
    frame->stripe_inc = 2*frame->stripe_height * frame->bytes_per_line;
    break;
  case VO_BOTTOM_FIELD:
    frame->rgb_dst = (uint8_t *)frame->data + frame->bytes_per_line ;
    frame->stripe_inc = 2*frame->stripe_height * frame->bytes_per_line;
    break;
  case VO_BOTH_FIELDS:
    frame->rgb_dst = (uint8_t *)frame->data;
    break;
  }
}


/* take care of the frame */
static void null_frame_dispose( vo_frame_t* vo_img){
  opie_frame_t* frame = (opie_frame_t*)vo_img;

  if (frame->data)
    free( frame->data );
  free (frame);
}

/* end take care of frames */

static vo_frame_t* null_alloc_frame( vo_driver_t* self ){

  null_driver_t* this = (null_driver_t*)self;
  opie_frame_t* frame;

#ifdef LOG
  fprintf (stderr, "nullvideo: alloc_frame\n");
#endif

  frame = (opie_frame_t*)malloc ( sizeof(opie_frame_t) );

  memset( frame, 0, sizeof( opie_frame_t) );
  memcpy (&frame->sc, &this->sc, sizeof(vo_scale_t));

  pthread_mutex_init (&frame->frame.mutex, NULL);

  frame->output = this;

  /* initialize the frame */
  frame->frame.driver = self;
  frame->frame.proc_slice = null_frame_copy;
  frame->frame.field = null_frame_field;
  frame->frame.dispose = null_frame_dispose;

  /*
   * colorspace converter for this frame
   */
  frame->yuv2rgb = this->yuv2rgb_factory->create_converter (this->yuv2rgb_factory);

  return (vo_frame_t*) frame;
}

static void null_update_frame_format( vo_driver_t* self, vo_frame_t* img,
                                      uint32_t width, uint32_t height,
                                      double ratio_code, int format,
                                      int flags ){
  null_driver_t* this = (null_driver_t*) self;
  opie_frame_t* frame = (opie_frame_t*)img;
  /* not needed now */

#ifdef LOG
  fprintf (stderr, "nullvideo: update_frame_format\n");
#endif

  flags &= VO_BOTH_FIELDS;

  /* find out if we need to adapt this frame */

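Note on the field handling above: for interlaced output, null_frame_field points rgb_dst at output line 0 (top field) or line 1 (bottom field) and doubles stripe_inc, so the slices converted by null_frame_copy land on every other output line and the two fields interleave in the single RGB buffer. A standalone sketch of that address arithmetic, with stripe_height shrunk to one line so the numbers stay small (the helper name is made up for this note):

    #include <stdint.h>
    #include <stdio.h>

    /* top field starts at output line 0, bottom field at line 1 */
    static uint8_t *field_start(uint8_t *rgb, int bytes_per_line, int bottom)
    {
        return bottom ? rgb + bytes_per_line : rgb;
    }

    int main(void)
    {
        uint8_t buf[4 * 8];                        /* 4 output lines, 8 bytes each */
        int bytes_per_line = 8;
        int stripe_inc = 2 * 1 * bytes_per_line;   /* 2 * stripe_height * bytes_per_line */
        uint8_t *dst = field_start(buf, bytes_per_line, 1);

        /* bottom field writes lines 1 and 3: offsets 8 and 24 */
        printf("%ld %ld\n", (long)(dst - buf), (long)(dst + stripe_inc - buf));
        return 0;
    }
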
@@ -226,100 +227,100 @@ static void null_update_frame_format( vo_driver_t* self, vo_frame_t* img,
    frame->sc.delivered_height = height;
    frame->sc.delivered_ratio = ratio_code;
    frame->flags = flags;
    frame->format = format;
    frame->sc.user_ratio = this->sc.user_ratio;
    frame->sc.gui_width = this->gui_width;
    frame->sc.gui_height = this->gui_height;
    frame->sc.gui_pixel_aspect = 1.0;

    vo_scale_compute_ideal_size ( &frame->sc );
    vo_scale_compute_output_size( &frame->sc );

#ifdef LOG
    fprintf (stderr, "nullvideo: gui %dx%d delivered %dx%d output %dx%d\n",
             frame->sc.gui_width, frame->sc.gui_height,
             frame->sc.delivered_width, frame->sc.delivered_height,
             frame->sc.output_width, frame->sc.output_height);
#endif

    /*
     * (re-) allocate
     */
    if( frame->data ) {
      if( frame->chunk[0] ){
        free( frame->chunk[0] );
        frame->chunk[0] = NULL;
      }
      if( frame->chunk[1] ){
        free ( frame->chunk[1] );
        frame->chunk[1] = NULL;
      }
      if( frame->chunk[2] ){
        free ( frame->chunk[2] );
        frame->chunk[2] = NULL;
      }
      free ( frame->data );
    }

    frame->data = xine_xmalloc (frame->sc.output_width
                                * frame->sc.output_height
                                * this->bytes_per_pixel );

    if( format == XINE_IMGFMT_YV12 ) {
      frame->frame.pitches[0] = 8*((width + 7) / 8);
      frame->frame.pitches[1] = 8*((width + 15) / 16);
      frame->frame.pitches[2] = 8*((width + 15) / 16);
      frame->frame.base[0] = xine_xmalloc_aligned (16, frame->frame.pitches[0] * height, (void **)&frame->chunk[0]);
      frame->frame.base[1] = xine_xmalloc_aligned (16, frame->frame.pitches[1] * ((height+ 1)/2), (void **)&frame->chunk[1]);
      frame->frame.base[2] = xine_xmalloc_aligned (16, frame->frame.pitches[2] * ((height+ 1)/2), (void **)&frame->chunk[2]);

    }else{
      frame->frame.pitches[0] = 8*((width + 3) / 4);

      frame->frame.base[0] = xine_xmalloc_aligned (16, frame->frame.pitches[0] * height,
                                                   (void **)&frame->chunk[0]);
      frame->chunk[1] = NULL;
      frame->chunk[2] = NULL;
    }

    frame->stripe_height = 16 * frame->sc.output_height / frame->sc.delivered_height;
    frame->bytes_per_line = frame->sc.output_width * this->bytes_per_pixel;

    /*
     * set up colorspace converter
     */

    switch (flags) {
    case VO_TOP_FIELD:
    case VO_BOTTOM_FIELD:
      frame->yuv2rgb->configure (frame->yuv2rgb,
                                 frame->sc.delivered_width,
                                 16,
                                 2*frame->frame.pitches[0],
                                 2*frame->frame.pitches[1],
                                 frame->sc.output_width,
                                 frame->stripe_height,
                                 frame->bytes_per_line*2);
      frame->yuv_stride = frame->bytes_per_line*2;
      break;
    case VO_BOTH_FIELDS:
      frame->yuv2rgb->configure (frame->yuv2rgb,
                                 frame->sc.delivered_width,
                                 16,
                                 frame->frame.pitches[0],
                                 frame->frame.pitches[1],
                                 frame->sc.output_width,
                                 frame->stripe_height,
                                 frame->bytes_per_line);
      frame->yuv_stride = frame->bytes_per_line;
      break;
    }
#ifdef LOG
    fprintf (stderr, "nullvideo: colorspace converter configured.\n");
#endif
  }

  /*
   * reset dest pointers
   */

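The pitch arithmetic above rounds every plane's stride up to a multiple of 8 bytes, presumably to keep the 16-byte-aligned buffers from xine_xmalloc_aligned friendly to the optimised yuv2rgb paths: 8*((width+7)/8) is the luma width rounded to 8, 8*((width+15)/16) is the half-width chroma row rounded to 8, and 8*((width+3)/4) is the 2-bytes-per-pixel YUY2 row rounded to 8. A small self-check of those identities (round_up is a helper written for this note, not project code):

    #include <assert.h>

    static int round_up(int value, int multiple)
    {
        return multiple * ((value + multiple - 1) / multiple);
    }

    int main(void)
    {
        int width;

        for (width = 1; width <= 2048; width++) {
            assert(8 * ((width + 7) / 8)   == round_up(width, 8));           /* YV12 luma  */
            assert(8 * ((width + 15) / 16) == round_up((width + 1) / 2, 8)); /* YV12 chroma */
            assert(8 * ((width + 3) / 4)   == round_up(2 * width, 8));       /* YUY2 packed */
        }
        return 0;
    }
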
@@ -329,139 +330,143 @@ static void null_update_frame_format( vo_driver_t* self, vo_frame_t* img,
      frame->rgb_dst = (uint8_t *)frame->data;
      frame->stripe_inc = 2 * frame->stripe_height * frame->bytes_per_line;
      break;
    case VO_BOTTOM_FIELD:
      frame->rgb_dst = (uint8_t *)frame->data + frame->bytes_per_line ;
      frame->stripe_inc = 2 * frame->stripe_height * frame->bytes_per_line;
      break;
    case VO_BOTH_FIELDS:
      frame->rgb_dst = (uint8_t *)frame->data;
      frame->stripe_inc = frame->stripe_height * frame->bytes_per_line;
      break;
    }
  }
}

static void null_display_frame( vo_driver_t* self, vo_frame_t *frame_gen ){
  null_driver_t* this = (null_driver_t*) self;
  opie_frame_t* frame = (opie_frame_t*)frame_gen;
  display_xine_frame_t display = this->frameDis;

  if (!this->m_show_video)
    return;

  if( display != NULL ) {
    (*display)(this->caller, frame->data,
               frame->sc.output_width, frame->sc.output_height,
               frame->bytes_per_line );
  }

  frame->frame.free(&frame->frame);
}


/* blending related */
static void null_overlay_clut_yuv2rgb (null_driver_t *this,
                                       vo_overlay_t *overlay,
                                       opie_frame_t *frame) {
  int i;
  clut_t* clut = (clut_t*) overlay->color;
  if (!overlay->rgb_clut) {
    for (i = 0; i < sizeof(overlay->color)/sizeof(overlay->color[0]); i++) {
      *((uint32_t *)&clut[i]) =
        frame->yuv2rgb->yuv2rgb_single_pixel_fun (frame->yuv2rgb,
                                                  clut[i].y, clut[i].cb,
                                                  clut[i].cr);
    }
    overlay->rgb_clut++;
  }
  if (!overlay->clip_rgb_clut) {
    clut = (clut_t*) overlay->clip_color;
    for (i = 0; i < sizeof(overlay->color)/sizeof(overlay->color[0]); i++) {
      *((uint32_t *)&clut[i]) =
        frame->yuv2rgb->yuv2rgb_single_pixel_fun(frame->yuv2rgb,
                                                 clut[i].y, clut[i].cb, clut[i].cr);
    }
    overlay->clip_rgb_clut++;
  }
}

static void null_overlay_blend ( vo_driver_t *this_gen, vo_frame_t *frame_gen,
                                 vo_overlay_t *overlay) {
  null_driver_t *this = (null_driver_t *) this_gen;
  opie_frame_t *frame = (opie_frame_t *) frame_gen;

  if(!this->m_show_video || frame->sc.output_width == 0
     || frame->sc.output_height== 0)
    return;

  /* Alpha Blend here */
  if (overlay->rle) {
    if( !overlay->rgb_clut || !overlay->clip_rgb_clut)
      null_overlay_clut_yuv2rgb(this,overlay,frame);

    switch(this->bpp) {
    case 16:
      blend_rgb16( (uint8_t *)frame->data, overlay,
                   frame->sc.output_width, frame->sc.output_height,
                   frame->sc.delivered_width, frame->sc.delivered_height);
      break;
    case 24:
      blend_rgb24( (uint8_t *)frame->data, overlay,
                   frame->sc.output_width, frame->sc.output_height,
                   frame->sc.delivered_width, frame->sc.delivered_height);
      break;
    case 32:
      blend_rgb32( (uint8_t *)frame->data, overlay,
                   frame->sc.output_width, frame->sc.output_height,
                   frame->sc.delivered_width, frame->sc.delivered_height);
      break;
    default:
      /* It should never get here */
      break;
    }
  }
}


static int null_get_property( vo_driver_t* self,
                              int property ){
  return 0;
}
static int null_set_property( vo_driver_t* self,
                              int property,
                              int value ){
  return value;
}
static void null_get_property_min_max( vo_driver_t* self,
                                       int property, int *min,
                                       int *max ){
  *max = 0;
  *min = 0;
}
static int null_gui_data_exchange( vo_driver_t* self,
                                   int data_type,
                                   void *data ){
  return 0;
}

static void null_dispose ( vo_driver_t* self ){
  null_driver_t* this = (null_driver_t*)self;
  free ( this );
}
static int null_redraw_needed( vo_driver_t* self ){
  return 0;
}


xine_vo_driver_t* init_video_out_plugin( xine_t *xine,
                                         void* video,
                                         display_xine_frame_t frameDisplayFunc,
                                         void *userData ){
  null_driver_t *vo;
  vo = (null_driver_t*)malloc( sizeof(null_driver_t ) );

  /* memset? */
  memset(vo,0, sizeof(null_driver_t ) );

  vo_scale_init (&vo->sc, 0, 0, xine->config);

  vo->sc.gui_pixel_aspect = 1.0;

  vo->m_show_video = 0; // false
  vo->m_video_fullscreen = 0;
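null_display_frame hands the finished RGB buffer to whatever frameDis callback the caller registered. The callback type is declared elsewhere in opieplayer2; the typedef below is inferred from the call site above and may differ in detail from the project's header. A sketch of a consumer that copies each frame row by row into a caller-owned buffer:

    #include <stdint.h>
    #include <string.h>

    /* inferred from (*display)(caller, data, output_width, output_height, bytes_per_line) */
    typedef void (*display_xine_frame_t)(void *user_data, uint8_t *frame,
                                         int width, int height, int bytes_per_line);

    struct rgb_target {
        uint8_t *pixels;    /* caller-owned destination buffer */
        int      pitch;     /* bytes per destination line      */
    };

    static void copy_frame(void *user_data, uint8_t *frame,
                           int width, int height, int bytes_per_line)
    {
        struct rgb_target *dst = (struct rgb_target *)user_data;
        int y;

        (void)width;        /* bytes_per_line already covers the whole row */
        for (y = 0; y < height; y++)
            memcpy(dst->pixels + y * dst->pitch,
                   frame + y * bytes_per_line, bytes_per_line);
    }
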
@@ -478,26 +483,26 @@ xine_vo_driver_t* init_video_out_plugin( xine_t *xine,
  vo->vo_driver.display_frame = null_display_frame;
  vo->vo_driver.overlay_blend = null_overlay_blend;
  vo->vo_driver.get_property = null_get_property;
  vo->vo_driver.set_property = null_set_property;
  vo->vo_driver.get_property_min_max = null_get_property_min_max;
  vo->vo_driver.gui_data_exchange = null_gui_data_exchange;
  vo->vo_driver.dispose = null_dispose;
  vo->vo_driver.redraw_needed = null_redraw_needed;


  /* capabilities */
  vo->m_capabilities = /* VO_CAP_COPIES_IMAGE | */ VO_CAP_YUY2 | VO_CAP_YV12;
  vo->yuv2rgb_factory = yuv2rgb_factory_init (MODE_16_RGB, vo->yuv2rgb_swap,
                                              vo->yuv2rgb_cmap);

  vo->caller = userData;
  vo->frameDis = frameDisplayFunc;

  /* return ( vo_driver_t*) vo; */
  return vo_new_port( xine, ( vo_driver_t* )vo, 0 );
}

#if 0
static vo_info_t vo_info_null = {
  5,
  XINE_VISUAL_TYPE_FB
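The driver advertises VO_CAP_YUY2 | VO_CAP_YV12, which is what null_get_capabilities returns and what a caller can test before choosing a frame format. A minimal illustration of that bitmask test; the constants are stand-ins for this note, not xine's real values:

    #include <stdint.h>
    #include <stdio.h>

    #define VO_CAP_YV12 0x00000001u   /* stand-in values, not xine's */
    #define VO_CAP_YUY2 0x00000002u

    int main(void)
    {
        uint32_t caps = VO_CAP_YUY2 | VO_CAP_YV12;   /* as reported by the driver */

        printf("YV12: %s\n", (caps & VO_CAP_YV12) ? "yes" : "no");
        printf("YUY2: %s\n", (caps & VO_CAP_YUY2) ? "yes" : "no");
        return 0;
    }
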
@@ -535,52 +540,52 @@ void null_set_fullscreen( xine_vo_driver_t* self, int screen ){
int null_is_scaling( xine_vo_driver_t* self ){
  return ((null_driver_t*)self->driver)->m_is_scaling;
}

void null_set_videoGamma( xine_vo_driver_t* self , int value ) {
  ((null_driver_t*) self->driver) ->yuv2rgb_gamma = value;
  ((null_driver_t*) self->driver) ->yuv2rgb_factory->set_gamma( ((null_driver_t*) self->driver) ->yuv2rgb_factory, value );
}

void null_set_scaling( xine_vo_driver_t* self, int scale ) {
  ((null_driver_t*)self->driver)->m_is_scaling = scale;
}

void null_set_gui_width( xine_vo_driver_t* self, int width ) {
  ((null_driver_t*)self->driver)->gui_width = width;
}
void null_set_gui_height( xine_vo_driver_t* self, int height ) {
  ((null_driver_t*)self->driver)->gui_height = height;
}


void null_set_mode( xine_vo_driver_t* self, int depth, int rgb ) {
  null_driver_t* this = (null_driver_t*)self->driver;

  this->bytes_per_pixel = (depth + 7 ) / 8;
  this->bpp = this->bytes_per_pixel * 8;
  this->depth = depth;
  printf("depth %d %d\n", depth, this->bpp);
  printf("pixeltype %d\n", rgb );
  switch ( this->depth ) {
  case 32:
    if( rgb == 0 )
      this->yuv2rgb_mode = MODE_32_RGB;
    else
      this->yuv2rgb_mode = MODE_32_BGR;
  case 24:
    if( this->bpp == 32 ) {
      if( rgb == 0 ) {
        this->yuv2rgb_mode = MODE_32_RGB;
      } else {
        this->yuv2rgb_mode = MODE_32_BGR;
      }
    }else{
      if( rgb == 0 )
        this->yuv2rgb_mode = MODE_24_RGB;
      else
        this->yuv2rgb_mode = MODE_24_BGR;
    };
    break;
  case 16:
    if( rgb == 0 ) {
      this->yuv2rgb_mode = MODE_16_RGB;
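In null_set_mode, note that case 32 has no break and falls through into case 24. It happens to be harmless because bpp is 32 there and the nested branch re-selects the same mode, but the mapping is easier to audit written as a pure function. A sketch with explicit returns; the MODE_* values are stand-ins, and the 16-bit BGR branch is assumed to mirror the others since it lies outside this hunk:

    #include <stdio.h>

    enum { MODE_16_RGB, MODE_16_BGR, MODE_24_RGB,
           MODE_24_BGR, MODE_32_RGB, MODE_32_BGR };   /* stand-ins, not yuv2rgb.h's values */

    static int pick_mode(int depth, int bpp, int rgb)
    {
        switch (depth) {
        case 32:
            return rgb ? MODE_32_BGR : MODE_32_RGB;
        case 24:
            if (bpp == 32)                            /* 24-bit depth on a 32 bpp framebuffer */
                return rgb ? MODE_32_BGR : MODE_32_RGB;
            return rgb ? MODE_24_BGR : MODE_24_RGB;
        case 16:
        default:
            return rgb ? MODE_16_BGR : MODE_16_RGB;
        }
    }

    int main(void)
    {
        printf("%d %d\n", pick_mode(24, 32, 0), pick_mode(16, 16, 1));
        return 0;
    }
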