xref: /haiku/src/add-ons/media/plugins/ffmpeg/gfx_util.cpp (revision 5a695bce105f327b826d66fb194d6564d6c3580a)
1 #include "gfx_util.h"
2 
3 #include <strings.h>
4 #include <stdio.h>
5 
6 #include "CpuCapabilities.h"
7 #include "gfx_conv_c.h"
8 #include "gfx_conv_mmx.h"
9 
10 
11 // ref docs
12 // http://www.joemaller.com/fcp/fxscript_yuv_color.shtml
13 
14 
15 #if 1
16   #define TRACE(a...) printf(a)
17 #else
18   #define TRACE(a...)
19 #endif
20 
21 
22 //! This function will try to find the best colorspaces for both the ff-codec
23 // and the Media Kit sides.
24 gfx_convert_func
25 resolve_colorspace(color_space colorSpace, PixelFormat pixelFormat, int width,
26 	int height)
27 {
28 	CPUCapabilities cpu;
29 
30 	switch (colorSpace) {
31 		case B_RGB32:
32 			if (pixelFormat == PIX_FMT_YUV410P) {
33 //				if (cpu.HasMMX()) {
34 //					TRACE("resolve_colorspace: gfx_conv_yuv410p_rgb32_mmx\n");
35 //					return gfx_conv_yuv410p_rgb32_mmx;
36 //				} else {
37 					TRACE("resolve_colorspace: gfx_conv_yuv410p_rgb32_c\n");
38 					return gfx_conv_yuv410p_rgb32_c;
39 //				}
40 			}
41 
42 			if (pixelFormat == PIX_FMT_YUV411P) {
43 //				if (cpu.HasMMX()) {
44 //					TRACE("resolve_colorspace: gfx_conv_yuv411p_rgb32_mmx\n");
45 //					return gfx_conv_yuv411p_rgb32_mmx;
46 //				} else {
47 					TRACE("resolve_colorspace: gfx_conv_yuv411p_rgb32_c\n");
48 					return gfx_conv_yuv411p_rgb32_c;
49 //				}
50 			}
51 
52 			if (pixelFormat == PIX_FMT_YUV420P
53 				|| pixelFormat == PIX_FMT_YUVJ420P) {
54 				if (cpu.HasSSE2() && width % 8 == 0 && height % 2 == 0) {
55 					TRACE("resolve_colorspace: "
56 						"gfx_conv_yuv420p_rgba32_sse2\n");
57 					return gfx_conv_yuv420p_rgba32_sse2;
58 				} else if (cpu.HasSSE1() && width % 4 == 0
59 					&& height % 2 == 0) {
60 					TRACE("resolve_colorspace: gfx_conv_yuv420p_rgba32_sse\n");
61 					return gfx_conv_yuv420p_rgba32_sse;
62 				} else {
63 					TRACE("resolve_colorspace: gfx_conv_YCbCr420p_RGB32_c\n");
64 					return gfx_conv_YCbCr420p_RGB32_c;
65 				}
66 			}
67 
68 			if (pixelFormat == PIX_FMT_YUV422P
69 				|| pixelFormat == PIX_FMT_YUVJ422P) {
70 				if (cpu.HasSSE2() && width % 8 == 0)
71 					return gfx_conv_yuv422p_rgba32_sse2;
72 				else if (cpu.HasSSE1() && width % 4 == 0)
73 					return gfx_conv_yuv422p_rgba32_sse;
74 				else
75 					return gfx_conv_YCbCr422_RGB32_c;
76 			}
77 
78 			TRACE("resolve_colorspace: %s => B_RGB32: NULL\n",
79 				pixfmt_to_string(pixelFormat));
80 			return NULL;
81 
82 		case B_RGB24_BIG:
83 			TRACE("resolve_colorspace: %s => B_RGB24_BIG: NULL\n",
84 				pixfmt_to_string(pixelFormat));
85 			return NULL;
86 
87 		case B_RGB24:
88 			TRACE("resolve_colorspace: %s => B_RGB24: NULL\n",
89 				pixfmt_to_string(pixelFormat));
90 			return NULL;
91 
92 		case B_YCbCr422:
93 
94 			if (pixelFormat == PIX_FMT_YUV410P) {
95 //				if (cpu.HasMMX()) {
96 //					TRACE("resolve_colorspace: "
97 //						"gfx_conv_yuv410p_ycbcr422_mmx\n");
98 //					return gfx_conv_yuv410p_ycbcr422_mmx;
99 //				} else {
100 					TRACE("resolve_colorspace: gfx_conv_yuv410p_ycbcr422_c\n");
101 					return gfx_conv_yuv410p_ycbcr422_c;
102 //				}
103 			}
104 
105 			if (pixelFormat == PIX_FMT_YUV411P) {
106 //				if (cpu.HasMMX()) {
107 //					TRACE("resolve_colorspace: "
108 //						"gfx_conv_yuv411p_ycbcr422_mmx\n");
109 //					return gfx_conv_yuv411p_ycbcr422_mmx;
110 //				} else {
111 					TRACE("resolve_colorspace: gfx_conv_yuv411p_ycbcr422_c\n");
112 					return gfx_conv_yuv411p_ycbcr422_c;
113 //				}
114 			}
115 
116 			if (pixelFormat == PIX_FMT_YUV420P
117 				|| pixelFormat == PIX_FMT_YUVJ420P) {
118 //				if (cpu.HasMMX()) {
119 //					TRACE("resolve_colorspace: "
120 //						"gfx_conv_yuv420p_ycbcr422_mmx\n");
121 //					return gfx_conv_yuv420p_ycbcr422_mmx;
122 //				} else {
123 					TRACE("resolve_colorspace: gfx_conv_yuv420p_ycbcr422_c\n");
124 					return gfx_conv_yuv420p_ycbcr422_c;
125 //				}
126 			}
127 
128 			if (pixelFormat == PIX_FMT_YUYV422) {
129 //				if (cpu.HasMMX()) {
130 //					TRACE("resolve_colorspace: PIX_FMT_YUV422 => B_YCbCr422: "
131 //						"gfx_conv_null_mmx\n");
132 //					return gfx_conv_null_mmx;
133 //				} else {
134 					TRACE("resolve_colorspace: PIX_FMT_YUV422 => B_YCbCr422: "
135 						"gfx_conv_null_c\n");
136 					return gfx_conv_null_c;
137 //				}
138 			}
139 
140 			TRACE("resolve_colorspace: %s => B_YCbCr422: NULL\n",
141 				pixfmt_to_string(pixelFormat));
142 			return gfx_conv_null_c;
143 
144 		default:
145 			TRACE("resolve_colorspace: default: NULL!!!\n");
146 			return NULL;
147 	}
148 }
149 
150 
151 const char*
152 pixfmt_to_string(int pixFormat)
153 {
154 	switch (pixFormat) {
155 		case PIX_FMT_NONE:
156 			return "PIX_FMT_NONE";
157 
158 		case PIX_FMT_YUV420P:
159 			// planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
160 			return "PIX_FMT_YUV420P";
161 
162 		case PIX_FMT_YUYV422:
163 			// packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
164 			return "PIX_FMT_YUYV422";
165 
166 		case PIX_FMT_RGB24:
167 			// packed RGB 8:8:8, 24bpp, RGBRGB...
168 			return "PIX_FMT_RGB24";
169 
170 		case PIX_FMT_BGR24:
171 			// packed RGB 8:8:8, 24bpp, BGRBGR...
172 			return "PIX_FMT_BGR24";
173 
174 		case PIX_FMT_YUV422P:
175 			// planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
176 			return "PIX_FMT_YUV422P";
177 
178 		case PIX_FMT_YUV444P:
179 			// planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
180 			return "PIX_FMT_YUV444P";
181 
182 		case PIX_FMT_RGB32:
183 			// packed RGB 8:8:8, 32bpp, (msb)8A 8R 8G 8B(lsb), in CPU
184 			// endianness
185 			return "PIX_FMT_RGB32";
186 
187 		case PIX_FMT_YUV410P:
188 			// planar YUV 4:1:0,  9bpp, (1 Cr & Cb sample per 4x4 Y samples)
189 			return "PIX_FMT_YUV410P";
190 
191 		case PIX_FMT_YUV411P:
192 			// planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
193 			return "PIX_FMT_YUV411P";
194 
195 		case PIX_FMT_RGB565:
196 			// packed RGB 5:6:5, 16bpp, (msb)5R 6G 5B(lsb), in CPU endianness
197 			return "PIX_FMT_RGB565";
198 
199 		case PIX_FMT_RGB555:
200 			// packed RGB 5:5:5, 16bpp, (msb)1A 5R 5G 5B(lsb), in CPU
201 			// endianness, most significant bit to 0
202 			return "PIX_FMT_RGB555";
203 
204 		case PIX_FMT_GRAY8:
205 			// Y, 8bpp
206 			return "PIX_FMT_GRAY8";
207 
208 		case PIX_FMT_MONOWHITE:
209 			// Y, 1bpp, 0 is white, 1 is black
210 			return "PIX_FMT_MONOWHITE";
211 
212 		case PIX_FMT_MONOBLACK:
213 			// Y, 1bpp, 0 is black, 1 is white
214 			return "PIX_FMT_MONOBLACK";
215 
216 		case PIX_FMT_PAL8:
217 			// 8 bit with PIX_FMT_RGB32 palette
218 			return "PIX_FMT_PAL8";
219 
220 		case PIX_FMT_YUVJ420P:
221 			// planar YUV 4:2:0, 12bpp, full scale (JPEG)
222 			return "PIX_FMT_YUVJ420P - YUV420P (Jpeg)";
223 
224 		case PIX_FMT_YUVJ422P:
225 			// planar YUV 4:2:2, 16bpp, full scale (JPEG)
226 			return "PIX_FMT_YUVJ422P - YUV422P (Jpeg)";
227 
228 		case PIX_FMT_YUVJ444P:
229 			// planar YUV 4:4:4, 24bpp, full scale (JPEG)
230 			return "PIX_FMT_YUVJ444P";
231 
232 		case PIX_FMT_XVMC_MPEG2_MC:
233 			// XVideo Motion Acceleration via common packet passing
234 			return "PIX_FMT_XVMC_MPEG2_MC";
235 
236 		case PIX_FMT_XVMC_MPEG2_IDCT:
237 			return "PIX_FMT_XVMC_MPEG2_IDCT";
238 		case PIX_FMT_UYVY422:
239 			// packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
240 			return "PIX_FMT_UYVY422";
241 
242 		case PIX_FMT_UYYVYY411:
243 			// packed YUV 4:1:1, 12bpp, Cb Y0 Y1 Cr Y2 Y3
244 			return "PIX_FMT_UYYVYY411";
245 
246 		case PIX_FMT_BGR32:
247 			// packed RGB 8:8:8, 32bpp, (msb)8A 8B 8G 8R(lsb), in CPU
248 			// endianness
249 			return "PIX_FMT_BGR32";
250 
251 		case PIX_FMT_BGR565:
252 			// packed RGB 5:6:5, 16bpp, (msb)5B 6G 5R(lsb), in CPU endianness
253 			return "PIX_FMT_BGR565";
254 
255 		case PIX_FMT_BGR555:
256 			// packed RGB 5:5:5, 16bpp, (msb)1A 5B 5G 5R(lsb), in CPU
257 			// endianness, most significant bit to 1
258 			return "PIX_FMT_BGR555";
259 
260 		case PIX_FMT_BGR8:
261 			// packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb)
262 			return "PIX_FMT_BGR8";
263 
264 		case PIX_FMT_BGR4:
265 			// packed RGB 1:2:1, 4bpp, (msb)1B 2G 1R(lsb)
266 			return "PIX_FMT_BGR4";
267 
268 		case PIX_FMT_BGR4_BYTE:
269 			// packed RGB 1:2:1,  8bpp, (msb)1B 2G 1R(lsb)
270 			return "PIX_FMT_BGR4_BYTE";
271 
272 		case PIX_FMT_RGB8:
273 			// packed RGB 3:3:2, 8bpp, (msb)2R 3G 3B(lsb)
274 			return "PIX_FMT_RGB8";
275 
276 		case PIX_FMT_RGB4:
277 			// packed RGB 1:2:1, 4bpp, (msb)1R 2G 1B(lsb)
278 			return "PIX_FMT_RGB4";
279 
280 		case PIX_FMT_RGB4_BYTE:
281 			// packed RGB 1:2:1, 8bpp, (msb)1R 2G 1B(lsb)
282 			return "PIX_FMT_RGB4_BYTE";
283 
284 		case PIX_FMT_NV12:
285 			// planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 for UV
286 			return "PIX_FMT_NV12";
287 
288 		case PIX_FMT_NV21:
289 			// as above, but U and V bytes are swapped
290 			return "PIX_FMT_NV21";
291 
292 		case PIX_FMT_RGB32_1:
293 			// packed RGB 8:8:8, 32bpp, (msb)8R 8G 8B 8A(lsb), in CPU
294 			// endianness
295 			return "PIX_FMT_RGB32_1";
296 
297 		case PIX_FMT_BGR32_1:
298 			// packed RGB 8:8:8, 32bpp, (msb)8B 8G 8R 8A(lsb), in CPU
299 			// endianness
300 			return "PIX_FMT_BGR32_1";
301 
302 		case PIX_FMT_GRAY16BE:
303 			// Y, 16bpp, big-endian
304 			return "PIX_FMT_GRAY16BE";
305 
306 		case PIX_FMT_GRAY16LE:
307 			// Y, 16bpp, little-endian
308 			return "PIX_FMT_GRAY16LE";
309 
310 		case PIX_FMT_YUV440P:
311 			// planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
312 			return "PIX_FMT_YUV440P";
313 
314 		case PIX_FMT_YUVJ440P:
315 			// planar YUV 4:4:0 full scale (JPEG)
316 			return "PIX_FMT_YUVJ440P - YUV440P (Jpeg)";
317 
318 		case PIX_FMT_YUVA420P:
319 			// planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A
320 			// samples)
321 			return "PIX_FMT_YUVA420P - YUV420P (Alpha)";
322 
323 		case PIX_FMT_VDPAU_H264:
324 			// H.264 HW decoding with VDPAU, data[0] contains a
325 			// vdpau_render_state struct which contains the bitstream of the
326 			// slices as well as various fields extracted from headers
327 			return "PIX_FMT_VDPAU_H264";
328 
329 		case PIX_FMT_VDPAU_MPEG1:
330 			// MPEG-1 HW decoding with VDPAU, data[0] contains a
331 			// vdpau_render_state struct which contains the bitstream of the
332 			// slices as well as various fields extracted from headers
333 			return "PIX_FMT_VDPAU_MPEG1";
334 
335 		case PIX_FMT_VDPAU_MPEG2:
336 			// MPEG-2 HW decoding with VDPAU, data[0] contains a
337 			// vdpau_render_state struct which contains the bitstream of the
338 			// slices as well as various fields extracted from headers
339 			return "PIX_FMT_VDPAU_MPEG2";
340 
341 		case PIX_FMT_VDPAU_WMV3:
342 			// WMV3 HW decoding with VDPAU, data[0] contains a
343 			// vdpau_render_state struct which contains the bitstream of the
344 			// slices as well as various fields extracted from headers
345 			return "PIX_FMT_VDPAU_WMV3";
346 
347 		case PIX_FMT_VDPAU_VC1:
348 			// VC-1 HW decoding with VDPAU, data[0] contains a
349 			// vdpau_render_state struct which contains the bitstream of the
350 			// slices as well as various fields extracted from headers
351 			return "PIX_FMT_VDPAU_VC1";
352 
353 		case PIX_FMT_RGB48BE:
354 			// packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, big-endian
355 			return "PIX_FMT_RGB48BE";
356 
357 		case PIX_FMT_RGB48LE:
358 			// packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, little-endian
359 			return "PIX_FMT_RGB48LE";
360 
361 		case PIX_FMT_VAAPI_MOCO:
362 			// HW acceleration through VA API at motion compensation
363 			// entry-point, Picture.data[0] contains a vaapi_render_state
364 			// struct which contains macroblocks as well as various fields
365 			// extracted from headers
366 			return "PIX_FMT_VAAPI_MOCO";
367 
368 		case PIX_FMT_VAAPI_IDCT:
369 			// HW acceleration through VA API at IDCT entry-point,
370 			// Picture.data[0] contains a vaapi_render_state struct which
371 			// contains fields extracted from headers
372 			return "PIX_FMT_VAAPI_IDCT";
373 
374 		case PIX_FMT_VAAPI_VLD:
375 			// HW decoding through VA API, Picture.data[0] contains a
376 			// vaapi_render_state struct which contains the bitstream of the
377 			// slices as well as various fields extracted from headers
378 			return "PIX_FMT_VAAPI_VLD";
379 
380 		default:
381 			return "(unknown)";
382 	}
383 }
384 
385 
/*!	Maps an ffmpeg pixel format to the closest matching Media Kit
	color_space. Formats without a Be API equivalent map to
	B_NO_COLOR_SPACE (a diagnostic message is printed for those).
*/
color_space
pixfmt_to_colorspace(int pixFormat)
{
	switch(pixFormat) {
		default:
			TRACE("No BE API colorspace definition for pixel format "
				"\"%s\".\n", pixfmt_to_string(pixFormat));
			// Supposed to fall through.
		case PIX_FMT_NONE:
			return B_NO_COLOR_SPACE;

		// NOTE: See pixfmt_to_string() for what these formats are.
		case PIX_FMT_YUV420P:
			return B_YUV420;
		case PIX_FMT_YUYV422:
			return B_YUV422;
		case PIX_FMT_RGB24:
			return B_RGB24_BIG;
		case PIX_FMT_BGR24:
			return B_RGB24;
		case PIX_FMT_YUV422P:
			return B_YUV422;
		case PIX_FMT_YUV444P:
			return B_YUV444;
		case PIX_FMT_RGB32:
			return B_RGBA32_BIG;
		case PIX_FMT_YUV410P:
			return B_YUV9;
		case PIX_FMT_YUV411P:
			return B_YUV12;
		case PIX_FMT_RGB565:
			return B_RGB16_BIG;
		case PIX_FMT_RGB555:
			return B_RGB15_BIG;
		case PIX_FMT_GRAY8:
			return B_GRAY8;
		case PIX_FMT_MONOBLACK:
			return B_GRAY1;
		case PIX_FMT_PAL8:
			return B_CMAP8;
		case PIX_FMT_BGR32:
			return B_RGB32;
		case PIX_FMT_BGR565:
			return B_RGB16;
		case PIX_FMT_BGR555:
			return B_RGB15;
	}
}
434 
435 
436 PixelFormat
437 colorspace_to_pixfmt(color_space format)
438 {
439 	switch(format) {
440 		default:
441 		case B_NO_COLOR_SPACE:
442 			return PIX_FMT_NONE;
443 
444 		// NOTE: See pixfmt_to_colorspace() for what these are.
445 		case B_YUV420:
446 			return PIX_FMT_YUV420P;
447 		case B_YUV422:
448 			return PIX_FMT_YUV422P;
449 		case B_RGB24_BIG:
450 			return PIX_FMT_RGB24;
451 		case B_RGB24:
452 			return PIX_FMT_BGR24;
453 		case B_YUV444:
454 			return PIX_FMT_YUV444P;
455 		case B_RGBA32_BIG:
456 		case B_RGB32_BIG:
457 			return PIX_FMT_BGR32;
458 		case B_YUV9:
459 			return PIX_FMT_YUV410P;
460 		case B_YUV12:
461 			return PIX_FMT_YUV411P;
462 		// TODO: YCbCr color spaces! These are not the same as YUV!
463 		case B_RGB16_BIG:
464 			return PIX_FMT_RGB565;
465 		case B_RGB15_BIG:
466 			return PIX_FMT_RGB555;
467 		case B_GRAY8:
468 			return PIX_FMT_GRAY8;
469 		case B_GRAY1:
470 			return PIX_FMT_MONOBLACK;
471 		case B_CMAP8:
472 			return PIX_FMT_PAL8;
473 		case B_RGBA32:
474 		case B_RGB32:
475 			return PIX_FMT_RGB32;
476 		case B_RGB16:
477 			return PIX_FMT_BGR565;
478 		case B_RGB15:
479 			return PIX_FMT_BGR555;
480 	}
481 }
482 
483 
484 #define BEGIN_TAG "\033[31m"
485 #define END_TAG "\033[0m"
486 
487 void
488 dump_ffframe(AVFrame* frame, const char* name)
489 {
490 	const char* picttypes[] = {"no pict type", "intra", "predicted",
491 		"bidir pre", "s(gmc)-vop"};
492 	printf(BEGIN_TAG"AVFrame(%s) pts:%-10lld cnum:%-5d dnum:%-5d %s%s, "
493 		" ]\n"END_TAG,
494 		name,
495 		frame->pts,
496 		frame->coded_picture_number,
497 		frame->display_picture_number,
498 //		frame->quality,
499 		frame->key_frame?"keyframe, ":"",
500 		picttypes[frame->pict_type]);
501 //	printf(BEGIN_TAG"\t\tlinesize[] = {%ld, %ld, %ld, %ld}\n"END_TAG,
502 //		frame->linesize[0], frame->linesize[1], frame->linesize[2],
503 //		frame->linesize[3]);
504 }
505 
506