xref: /haiku/src/add-ons/media/plugins/ffmpeg/gfx_util.cpp (revision d374a27286b8a52974a97dba0d5966ea026a665d)
#include "gfx_util.h"

#include <strings.h>
#include <stdio.h>

#include "CpuCapabilities.h"
#include "gfx_conv_c.h"
#include "gfx_conv_mmx.h"


// ref docs
// http://www.joemaller.com/fcp/fxscript_yuv_color.shtml


#if 1
  #define TRACE(a...) printf(a)
#else
  #define TRACE(a...)
#endif


//! This function will try to find the best colorspaces for both the ff-codec
// and the Media Kit sides.
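// SIMD converters are preferred when the CPU supports them and the frame
// dimensions meet their alignment requirements; otherwise a portable C
// converter is used, or NULL is returned when no conversion is available.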
gfx_convert_func
resolve_colorspace(color_space colorSpace, PixelFormat pixelFormat, int width,
	int height)
{
	CPUCapabilities cpu;

	switch (colorSpace) {
		case B_RGB32:
			// Planar Formats
			if (pixelFormat == PIX_FMT_YUV410P) {
				TRACE("resolve_colorspace: gfx_conv_yuv410p_rgb32_c\n");
				return gfx_conv_yuv410p_rgb32_c;
			}

			if (pixelFormat == PIX_FMT_YUV411P) {
				TRACE("resolve_colorspace: gfx_conv_yuv411p_rgb32_c\n");
				return gfx_conv_yuv411p_rgb32_c;
			}

			if (pixelFormat == PIX_FMT_YUV420P
				|| pixelFormat == PIX_FMT_YUVJ420P) {
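				// Only use a SIMD converter when the frame dimensions meet
				// its requirements (width a multiple of 8 for the SSE2/SSSE3
				// paths, of 4 for SSE, and an even height for the 4:2:0
				// chroma); otherwise fall back to the C implementation.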
				if (cpu.HasSSSE3() && width % 8 == 0 && height % 2 == 0) {
					TRACE("resolve_colorspace: gfx_conv_yuv420p_rgba32_ssse3\n");
					return gfx_conv_yuv420p_rgba32_ssse3;
				} else if (cpu.HasSSE2() && width % 8 == 0 && height % 2 == 0) {
					TRACE("resolve_colorspace: gfx_conv_yuv420p_rgba32_sse2\n");
					return gfx_conv_yuv420p_rgba32_sse2;
				} else if (cpu.HasSSE1() && width % 4 == 0
					&& height % 2 == 0) {
					TRACE("resolve_colorspace: gfx_conv_yuv420p_rgba32_sse\n");
					return gfx_conv_yuv420p_rgba32_sse;
				} else {
					TRACE("resolve_colorspace: gfx_conv_YCbCr420p_RGB32_c\n");
					return gfx_conv_YCbCr420p_RGB32_c;
				}
			}

			if (pixelFormat == PIX_FMT_YUV422P
				|| pixelFormat == PIX_FMT_YUVJ422P) {
				if (cpu.HasSSSE3() && width % 8 == 0) {
					TRACE("resolve_colorspace: gfx_conv_yuv422p_rgba32_ssse3\n");
					return gfx_conv_yuv422p_rgba32_ssse3;
				} else if (cpu.HasSSE2() && width % 8 == 0) {
					TRACE("resolve_colorspace: gfx_conv_yuv422p_rgba32_sse2\n");
					return gfx_conv_yuv422p_rgba32_sse2;
				} else if (cpu.HasSSE1() && width % 4 == 0) {
					TRACE("resolve_colorspace: gfx_conv_yuv422p_rgba32_sse\n");
					return gfx_conv_yuv422p_rgba32_sse;
				} else {
					TRACE("resolve_colorspace: gfx_conv_YCbCr422_RGB32_c\n");
					return gfx_conv_YCbCr422_RGB32_c;
				}
			}

			// Packed Formats
			if (pixelFormat == PIX_FMT_YUYV422) {
				if (cpu.HasSSSE3() && width % 8 == 0) {
					return gfx_conv_yuv422_rgba32_ssse3;
				} else if (cpu.HasSSE2() && width % 8 == 0) {
					return gfx_conv_yuv422_rgba32_sse2;
				} else if (cpu.HasSSE1() && width % 4 == 0
					&& height % 2 == 0) {
					return gfx_conv_yuv422_rgba32_sse;
				} else {
					return gfx_conv_YCbCr422_RGB32_c;
				}
			}

			TRACE("resolve_colorspace: %s => B_RGB32: NULL\n",
				pixfmt_to_string(pixelFormat));
			return NULL;

		case B_RGB24_BIG:
			TRACE("resolve_colorspace: %s => B_RGB24_BIG: NULL\n",
				pixfmt_to_string(pixelFormat));
			return NULL;

		case B_RGB24:
			TRACE("resolve_colorspace: %s => B_RGB24: NULL\n",
				pixfmt_to_string(pixelFormat));
			return NULL;

		case B_YCbCr422:
			if (pixelFormat == PIX_FMT_YUV410P) {
				TRACE("resolve_colorspace: gfx_conv_yuv410p_ycbcr422_c\n");
				return gfx_conv_yuv410p_ycbcr422_c;
			}

			if (pixelFormat == PIX_FMT_YUV411P) {
				TRACE("resolve_colorspace: gfx_conv_yuv411p_ycbcr422_c\n");
				return gfx_conv_yuv411p_ycbcr422_c;
			}

			if (pixelFormat == PIX_FMT_YUV420P
				|| pixelFormat == PIX_FMT_YUVJ420P) {
				TRACE("resolve_colorspace: gfx_conv_yuv420p_ycbcr422_c\n");
				return gfx_conv_yuv420p_ycbcr422_c;
			}

			if (pixelFormat == PIX_FMT_YUYV422) {
				TRACE("resolve_colorspace: PIX_FMT_YUYV422 => B_YCbCr422: "
					"gfx_conv_null\n");
				return gfx_conv_null;
			}

			TRACE("resolve_colorspace: %s => B_YCbCr422: NULL\n",
				pixfmt_to_string(pixelFormat));
			return NULL;

		default:
			TRACE("resolve_colorspace: default: NULL!!!\n");
			return NULL;
	}
}


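// Return a human-readable name for an FFmpeg pixel format, used for the
// TRACE output in this file.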
const char*
pixfmt_to_string(int pixFormat)
{
	switch (pixFormat) {
		case PIX_FMT_NONE:
			return "PIX_FMT_NONE";

		case PIX_FMT_YUV420P:
			// planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
			return "PIX_FMT_YUV420P";

		case PIX_FMT_YUYV422:
			// packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
			return "PIX_FMT_YUYV422";

		case PIX_FMT_RGB24:
			// packed RGB 8:8:8, 24bpp, RGBRGB...
			return "PIX_FMT_RGB24";

		case PIX_FMT_BGR24:
			// packed RGB 8:8:8, 24bpp, BGRBGR...
			return "PIX_FMT_BGR24";

		case PIX_FMT_YUV422P:
			// planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
			return "PIX_FMT_YUV422P";

		case PIX_FMT_YUV444P:
			// planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
			return "PIX_FMT_YUV444P";

		case PIX_FMT_RGB32:
			// packed RGB 8:8:8, 32bpp, (msb)8A 8R 8G 8B(lsb), in CPU
			// endianness
			return "PIX_FMT_RGB32";

		case PIX_FMT_YUV410P:
			// planar YUV 4:1:0,  9bpp, (1 Cr & Cb sample per 4x4 Y samples)
			return "PIX_FMT_YUV410P";

		case PIX_FMT_YUV411P:
			// planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
			return "PIX_FMT_YUV411P";

		case PIX_FMT_RGB565:
			// packed RGB 5:6:5, 16bpp, (msb)5R 6G 5B(lsb), in CPU endianness
			return "PIX_FMT_RGB565";

		case PIX_FMT_RGB555:
			// packed RGB 5:5:5, 16bpp, (msb)1A 5R 5G 5B(lsb), in CPU
			// endianness, most significant bit to 0
			return "PIX_FMT_RGB555";

		case PIX_FMT_GRAY8:
			// Y, 8bpp
			return "PIX_FMT_GRAY8";

		case PIX_FMT_MONOWHITE:
			// Y, 1bpp, 0 is white, 1 is black
			return "PIX_FMT_MONOWHITE";

		case PIX_FMT_MONOBLACK:
			// Y, 1bpp, 0 is black, 1 is white
			return "PIX_FMT_MONOBLACK";

		case PIX_FMT_PAL8:
			// 8 bit with PIX_FMT_RGB32 palette
			return "PIX_FMT_PAL8";

		case PIX_FMT_YUVJ420P:
			// planar YUV 4:2:0, 12bpp, full scale (JPEG)
			return "PIX_FMT_YUVJ420P - YUV420P (Jpeg)";

		case PIX_FMT_YUVJ422P:
			// planar YUV 4:2:2, 16bpp, full scale (JPEG)
			return "PIX_FMT_YUVJ422P - YUV422P (Jpeg)";

		case PIX_FMT_YUVJ444P:
			// planar YUV 4:4:4, 24bpp, full scale (JPEG)
			return "PIX_FMT_YUVJ444P";

		case PIX_FMT_XVMC_MPEG2_MC:
			// XVideo Motion Acceleration via common packet passing
			return "PIX_FMT_XVMC_MPEG2_MC";

		case PIX_FMT_XVMC_MPEG2_IDCT:
			return "PIX_FMT_XVMC_MPEG2_IDCT";

		case PIX_FMT_UYVY422:
			// packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
			return "PIX_FMT_UYVY422";

		case PIX_FMT_UYYVYY411:
			// packed YUV 4:1:1, 12bpp, Cb Y0 Y1 Cr Y2 Y3
			return "PIX_FMT_UYYVYY411";

		case PIX_FMT_BGR32:
			// packed RGB 8:8:8, 32bpp, (msb)8A 8B 8G 8R(lsb), in CPU
			// endianness
			return "PIX_FMT_BGR32";

		case PIX_FMT_BGR565:
			// packed RGB 5:6:5, 16bpp, (msb)5B 6G 5R(lsb), in CPU endianness
			return "PIX_FMT_BGR565";

		case PIX_FMT_BGR555:
			// packed RGB 5:5:5, 16bpp, (msb)1A 5B 5G 5R(lsb), in CPU
			// endianness, most significant bit to 1
			return "PIX_FMT_BGR555";

		case PIX_FMT_BGR8:
			// packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb)
			return "PIX_FMT_BGR8";

		case PIX_FMT_BGR4:
			// packed RGB 1:2:1, 4bpp, (msb)1B 2G 1R(lsb)
			return "PIX_FMT_BGR4";

		case PIX_FMT_BGR4_BYTE:
			// packed RGB 1:2:1,  8bpp, (msb)1B 2G 1R(lsb)
			return "PIX_FMT_BGR4_BYTE";

		case PIX_FMT_RGB8:
			// packed RGB 3:3:2, 8bpp, (msb)2R 3G 3B(lsb)
			return "PIX_FMT_RGB8";

		case PIX_FMT_RGB4:
			// packed RGB 1:2:1, 4bpp, (msb)1R 2G 1B(lsb)
			return "PIX_FMT_RGB4";

		case PIX_FMT_RGB4_BYTE:
			// packed RGB 1:2:1, 8bpp, (msb)1R 2G 1B(lsb)
			return "PIX_FMT_RGB4_BYTE";

		case PIX_FMT_NV12:
			// planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 for UV
			return "PIX_FMT_NV12";

		case PIX_FMT_NV21:
			// as above, but U and V bytes are swapped
			return "PIX_FMT_NV21";

		case PIX_FMT_RGB32_1:
			// packed RGB 8:8:8, 32bpp, (msb)8R 8G 8B 8A(lsb), in CPU
			// endianness
			return "PIX_FMT_RGB32_1";

		case PIX_FMT_BGR32_1:
			// packed RGB 8:8:8, 32bpp, (msb)8B 8G 8R 8A(lsb), in CPU
			// endianness
			return "PIX_FMT_BGR32_1";

		case PIX_FMT_GRAY16BE:
			// Y, 16bpp, big-endian
			return "PIX_FMT_GRAY16BE";

		case PIX_FMT_GRAY16LE:
			// Y, 16bpp, little-endian
			return "PIX_FMT_GRAY16LE";

		case PIX_FMT_YUV440P:
			// planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
			return "PIX_FMT_YUV440P";

		case PIX_FMT_YUVJ440P:
			// planar YUV 4:4:0 full scale (JPEG)
			return "PIX_FMT_YUVJ440P - YUV440P (Jpeg)";

		case PIX_FMT_YUVA420P:
			// planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A
			// samples)
			return "PIX_FMT_YUVA420P - YUV420P (Alpha)";

		case PIX_FMT_VDPAU_H264:
			// H.264 HW decoding with VDPAU, data[0] contains a
			// vdpau_render_state struct which contains the bitstream of the
			// slices as well as various fields extracted from headers
			return "PIX_FMT_VDPAU_H264";

		case PIX_FMT_VDPAU_MPEG1:
			// MPEG-1 HW decoding with VDPAU, data[0] contains a
			// vdpau_render_state struct which contains the bitstream of the
			// slices as well as various fields extracted from headers
			return "PIX_FMT_VDPAU_MPEG1";

		case PIX_FMT_VDPAU_MPEG2:
			// MPEG-2 HW decoding with VDPAU, data[0] contains a
			// vdpau_render_state struct which contains the bitstream of the
			// slices as well as various fields extracted from headers
			return "PIX_FMT_VDPAU_MPEG2";

		case PIX_FMT_VDPAU_WMV3:
			// WMV3 HW decoding with VDPAU, data[0] contains a
			// vdpau_render_state struct which contains the bitstream of the
			// slices as well as various fields extracted from headers
			return "PIX_FMT_VDPAU_WMV3";

		case PIX_FMT_VDPAU_VC1:
			// VC-1 HW decoding with VDPAU, data[0] contains a
			// vdpau_render_state struct which contains the bitstream of the
			// slices as well as various fields extracted from headers
			return "PIX_FMT_VDPAU_VC1";

		case PIX_FMT_RGB48BE:
			// packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, big-endian
			return "PIX_FMT_RGB48BE";

		case PIX_FMT_RGB48LE:
			// packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, little-endian
			return "PIX_FMT_RGB48LE";

		case PIX_FMT_VAAPI_MOCO:
			// HW acceleration through VA API at motion compensation
			// entry-point, Picture.data[0] contains a vaapi_render_state
			// struct which contains macroblocks as well as various fields
			// extracted from headers
			return "PIX_FMT_VAAPI_MOCO";

		case PIX_FMT_VAAPI_IDCT:
			// HW acceleration through VA API at IDCT entry-point,
			// Picture.data[0] contains a vaapi_render_state struct which
			// contains fields extracted from headers
			return "PIX_FMT_VAAPI_IDCT";

		case PIX_FMT_VAAPI_VLD:
			// HW decoding through VA API, Picture.data[0] contains a
			// vaapi_render_state struct which contains the bitstream of the
			// slices as well as various fields extracted from headers
			return "PIX_FMT_VAAPI_VLD";

		default:
			return "(unknown)";
	}
}


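// Map an FFmpeg pixel format to the nearest matching Media Kit color_space;
// formats without a Be API equivalent yield B_NO_COLOR_SPACE.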
color_space
pixfmt_to_colorspace(int pixFormat)
{
	switch (pixFormat) {
		default:
			TRACE("No BE API colorspace definition for pixel format "
				"\"%s\".\n", pixfmt_to_string(pixFormat));
			// Supposed to fall through.
		case PIX_FMT_NONE:
			return B_NO_COLOR_SPACE;

		// NOTE: See pixfmt_to_string() for what these formats are.
		case PIX_FMT_YUV420P:
			return B_YUV420;
		case PIX_FMT_YUYV422:
			return B_YUV422;
		case PIX_FMT_RGB24:
			return B_RGB24_BIG;
		case PIX_FMT_BGR24:
			return B_RGB24;
		case PIX_FMT_YUV422P:
			return B_YUV422;
		case PIX_FMT_YUV444P:
			return B_YUV444;
		case PIX_FMT_RGB32:
			return B_RGBA32_BIG;
		case PIX_FMT_YUV410P:
			return B_YUV9;
		case PIX_FMT_YUV411P:
			return B_YUV12;
		case PIX_FMT_RGB565:
			return B_RGB16_BIG;
		case PIX_FMT_RGB555:
			return B_RGB15_BIG;
		case PIX_FMT_GRAY8:
			return B_GRAY8;
		case PIX_FMT_MONOBLACK:
			return B_GRAY1;
		case PIX_FMT_PAL8:
			return B_CMAP8;
		case PIX_FMT_BGR32:
			return B_RGB32;
		case PIX_FMT_BGR565:
			return B_RGB16;
		case PIX_FMT_BGR555:
			return B_RGB15;
	}
}


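// Inverse mapping of pixfmt_to_colorspace(): translate a Media Kit
// color_space into the corresponding FFmpeg pixel format.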
PixelFormat
colorspace_to_pixfmt(color_space format)
{
	switch (format) {
		default:
		case B_NO_COLOR_SPACE:
			return PIX_FMT_NONE;

		// NOTE: See pixfmt_to_colorspace() for what these are.
		case B_YUV420:
			return PIX_FMT_YUV420P;
		case B_YUV422:
			return PIX_FMT_YUV422P;
		case B_RGB24_BIG:
			return PIX_FMT_RGB24;
		case B_RGB24:
			return PIX_FMT_BGR24;
		case B_YUV444:
			return PIX_FMT_YUV444P;
		case B_RGBA32_BIG:
		case B_RGB32_BIG:
			return PIX_FMT_BGR32;
		case B_YUV9:
			return PIX_FMT_YUV410P;
		case B_YUV12:
			return PIX_FMT_YUV411P;
		// TODO: YCbCr color spaces! These are not the same as YUV!
		case B_RGB16_BIG:
			return PIX_FMT_RGB565;
		case B_RGB15_BIG:
			return PIX_FMT_RGB555;
		case B_GRAY8:
			return PIX_FMT_GRAY8;
		case B_GRAY1:
			return PIX_FMT_MONOBLACK;
		case B_CMAP8:
			return PIX_FMT_PAL8;
		case B_RGBA32:
		case B_RGB32:
			return PIX_FMT_RGB32;
		case B_RGB16:
			return PIX_FMT_BGR565;
		case B_RGB15:
			return PIX_FMT_BGR555;
	}
}


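// ANSI escape sequences used to print the frame dump below in red.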
#define BEGIN_TAG "\033[31m"
#define END_TAG "\033[0m"

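// Debug helper: print a one-line summary of an AVFrame (pts, coded/display
// picture numbers, key frame flag and picture type) to stdout.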
void
dump_ffframe(AVFrame* frame, const char* name)
{
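	// Picture type names for the pict_type values handled here
	// (0 = none, I, P, B, S(GMC)-VOP).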
	const char* picttypes[] = {"no pict type", "intra", "predicted",
		"bidir pre", "s(gmc)-vop"};
	printf(BEGIN_TAG"AVFrame(%s) pts:%-10lld cnum:%-5d dnum:%-5d "
		"%s%s\n"END_TAG,
		name,
		frame->pts,
		frame->coded_picture_number,
		frame->display_picture_number,
//		frame->quality,
		frame->key_frame ? "keyframe, " : "",
		picttypes[frame->pict_type]);
//	printf(BEGIN_TAG"\t\tlinesize[] = {%ld, %ld, %ld, %ld}\n"END_TAG,
//		frame->linesize[0], frame->linesize[1], frame->linesize[2],
//		frame->linesize[3]);
}
495