21#include "../SDL_internal.h"
31#define HAVE_FAST_WRITE_INT8 1
35# undef HAVE_FAST_WRITE_INT8
36# define HAVE_FAST_WRITE_INT8 0
41#if SDL_ALTIVEC_BLITTERS
46#include <sys/sysctl.h>
50 const char key[] =
"hw.l3cachesize";
52 size_t typeSize =
sizeof(
result);
70#if (defined(__MACOSX__) && (__GNUC__ < 4))
71#define VECUINT8_LITERAL(a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p) \
72 (vector unsigned char) ( a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p )
73#define VECUINT16_LITERAL(a,b,c,d,e,f,g,h) \
74 (vector unsigned short) ( a,b,c,d,e,f,g,h )
76#define VECUINT8_LITERAL(a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p) \
77 (vector unsigned char) { a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p }
78#define VECUINT16_LITERAL(a,b,c,d,e,f,g,h) \
79 (vector unsigned short) { a,b,c,d,e,f,g,h }
/* Nonzero iff address 'x' is not 16-byte aligned (AltiVec vec_ld/vec_st
 * operate on 16-byte-aligned quantities, so callers scalar-loop until the
 * pointer is aligned).  The argument is now parenthesized so expression
 * arguments expand correctly. */
#define UNALIGNED_PTR(x) (((size_t)(x)) & 0x0000000F)
/* Build a vec_perm select pattern that reorders the four bytes of each of
 * the four 32-bit words in a vector; (a,b,c,d) are per-word byte offsets
 * in 0..3.  Arguments are parenthesized so expression arguments expand
 * correctly.
 * NOTE(review): this uses the parenthesized AltiVec literal form -- confirm
 * it matches the VECUINT8_LITERAL convention chosen for the target compiler. */
#define VSWIZZLE32(a,b,c,d) (vector unsigned char) \
                            ( 0x00+(a), 0x00+(b), 0x00+(c), 0x00+(d), \
                              0x04+(a), 0x04+(b), 0x04+(c), 0x04+(d), \
                              0x08+(a), 0x08+(b), 0x08+(c), 0x08+(d), \
                              0x0C+(a), 0x0C+(b), 0x0C+(c), 0x0C+(d) )
/* Assemble one 32-bit pixel in dstfmt's channel layout from 8-bit r,g,b,a
 * values: each channel is shifted into place and masked.  The channel
 * arguments are parenthesized so expression arguments (e.g. 'r + 1')
 * expand correctly; 'dstfmt' is parenthesized before '->' for the same
 * reason. */
#define MAKE8888(dstfmt, r, g, b, a)  \
    ( (((r) << (dstfmt)->Rshift) & (dstfmt)->Rmask) | \
      (((g) << (dstfmt)->Gshift) & (dstfmt)->Gmask) | \
      (((b) << (dstfmt)->Bshift) & (dstfmt)->Bmask) | \
      (((a) << (dstfmt)->Ashift) & (dstfmt)->Amask) )
/* AltiVec data-stream (vec_dst*) channel numbers: one prefetch channel is
 * used for reads from the source surface, a second for writes to the
 * destination surface; both are released with vec_dss() when a blit ends. */
#define DST_CHAN_SRC 1
#define DST_CHAN_DEST 2
/* Pack a vec_dst/vec_dstt/vec_dstst control word: block size in the top
 * byte, block count in the next byte, and the stride between blocks in the
 * low half-word. */
#define DST_CTRL(size, count, stride) \
    (((size) << 24) | ((count) << 16) | (stride))
108#define VEC_ALIGNER(src) ((UNALIGNED_PTR(src)) \
110 : vec_add(vec_lvsl(8, src), vec_splat_u8(8)))
113static vector
unsigned char
125 0x00FF0000, 0x0000FF00, 0x000000FF, 0xFF000000,
130 const vector
unsigned char plus = VECUINT8_LITERAL(0x00, 0x00, 0x00, 0x00,
131 0x04, 0x04, 0x04, 0x04,
132 0x08, 0x08, 0x08, 0x08,
135 vector
unsigned char vswiz;
136 vector
unsigned int srcvec;
137 Uint32 rmask, gmask, bmask, amask;
140 srcfmt = &default_pixel_format;
143 dstfmt = &default_pixel_format;
146#define RESHIFT(X) (3 - ((X) >> 3))
154 ((srcfmt->
Amask) ? RESHIFT(srcfmt->
163 ((
unsigned int *) (
char *) &srcvec)[0] = (rmask | gmask | bmask | amask);
164 vswiz = vec_add(plus, (vector
unsigned char) vec_splat(srcvec, 0));
178 vector
unsigned char valpha = vec_splat_u8(0);
179 vector
unsigned char vpermute = calc_swizzle32(srcfmt,
NULL);
180 vector
unsigned char vgmerge = VECUINT8_LITERAL(0x00, 0x02, 0x00, 0x06,
181 0x00, 0x0a, 0x00, 0x0e,
182 0x00, 0x12, 0x00, 0x16,
183 0x00, 0x1a, 0x00, 0x1e);
184 vector
unsigned short v1 = vec_splat_u16(1);
185 vector
unsigned short v3 = vec_splat_u16(3);
186 vector
unsigned short v3f =
187 VECUINT16_LITERAL(0x003f, 0x003f, 0x003f, 0x003f,
188 0x003f, 0x003f, 0x003f, 0x003f);
189 vector
unsigned short vfc =
190 VECUINT16_LITERAL(0x00fc, 0x00fc, 0x00fc, 0x00fc,
191 0x00fc, 0x00fc, 0x00fc, 0x00fc);
192 vector
unsigned short vf800 = (vector
unsigned short) vec_splat_u8(-7);
193 vf800 = vec_sl(vf800, vec_splat_u16(8));
196 vector
unsigned char valigner;
197 vector
unsigned char voverflow;
198 vector
unsigned char vsrc;
204#define ONE_PIXEL_BLEND(condition, widthvar) \
205 while (condition) { \
207 unsigned sR, sG, sB, sA; \
208 DISEMBLE_RGBA((Uint8 *)src, 4, srcfmt, Pixel, \
210 *(Uint16 *)(dst) = (((sR << 8) & 0x0000F800) | \
211 ((sG << 3) & 0x000007E0) | \
212 ((sB >> 3) & 0x0000001F)); \
221 extrawidth = (
width % 8);
223 vsrc = vec_ld(0,
src);
224 valigner = VEC_ALIGNER(
src);
227 vector
unsigned short vpixel, vrpixel, vgpixel, vbpixel;
228 vector
unsigned int vsrc1, vsrc2;
229 vector
unsigned char vdst;
231 voverflow = vec_ld(15,
src);
232 vsrc = vec_perm(vsrc, voverflow, valigner);
233 vsrc1 = (vector
unsigned int) vec_perm(vsrc, valpha, vpermute);
236 voverflow = vec_ld(15,
src);
237 vsrc = vec_perm(vsrc, voverflow, valigner);
238 vsrc2 = (vector
unsigned int) vec_perm(vsrc, valpha, vpermute);
240 vpixel = (vector
unsigned short) vec_packpx(vsrc1, vsrc2);
241 vgpixel = (vector
unsigned short) vec_perm(vsrc1, vsrc2, vgmerge);
242 vgpixel = vec_and(vgpixel, vfc);
243 vgpixel = vec_sl(vgpixel,
v3);
244 vrpixel = vec_sl(vpixel,
v1);
245 vrpixel = vec_and(vrpixel, vf800);
246 vbpixel = vec_and(vpixel, v3f);
248 vec_or((vector
unsigned char) vrpixel,
249 (vector
unsigned char) vgpixel);
251 vdst = vec_or(vdst, (vector
unsigned char) vbpixel);
252 vec_st(vdst, 0,
dst);
263 ONE_PIXEL_BLEND((extrawidth), extrawidth);
264#undef ONE_PIXEL_BLEND
284 vector
unsigned char valpha;
285 vector
unsigned char vpermute;
286 vector
unsigned short vf800;
287 vector
unsigned int v8 = vec_splat_u32(8);
288 vector
unsigned int v16 = vec_add(v8, v8);
289 vector
unsigned short v2 = vec_splat_u16(2);
290 vector
unsigned short v3 = vec_splat_u16(3);
296 vector
unsigned char vredalpha1 = VECUINT8_LITERAL(0x10, 0x00, 0x01, 0x01,
297 0x10, 0x02, 0x01, 0x01,
298 0x10, 0x04, 0x01, 0x01,
301 vector
unsigned char vredalpha2 =
303 char) (vec_add((vector
unsigned int) vredalpha1, vec_sl(v8, v16))
309 vector
unsigned char vblue1 = VECUINT8_LITERAL(0x00, 0x01, 0x02, 0x11,
310 0x04, 0x05, 0x06, 0x13,
311 0x08, 0x09, 0x0a, 0x15,
312 0x0c, 0x0d, 0x0e, 0x17);
313 vector
unsigned char vblue2 =
314 (vector
unsigned char) (vec_add((vector
unsigned int) vblue1, v8)
320 vector
unsigned char vgreen1 = VECUINT8_LITERAL(0x00, 0x01, 0x10, 0x03,
321 0x04, 0x05, 0x12, 0x07,
322 0x08, 0x09, 0x14, 0x0b,
323 0x0c, 0x0d, 0x16, 0x0f);
324 vector
unsigned char vgreen2 =
326 char) (vec_add((vector
unsigned int) vgreen1, vec_sl(v8, v8))
332 vf800 = (vector
unsigned short) vec_splat_u8(-7);
333 vf800 = vec_sl(vf800, vec_splat_u16(8));
335 if (dstfmt->
Amask && info->
a) {
336 ((
unsigned char *) &valpha)[0] =
alpha = info->
a;
337 valpha = vec_splat(valpha, 0);
340 valpha = vec_splat_u8(0);
343 vpermute = calc_swizzle32(
NULL, dstfmt);
345 vector
unsigned char valigner;
346 vector
unsigned char voverflow;
347 vector
unsigned char vsrc;
353#define ONE_PIXEL_BLEND(condition, widthvar) \
354 while (condition) { \
355 unsigned sR, sG, sB; \
356 unsigned short Pixel = *((unsigned short *)src); \
357 sR = (Pixel >> 8) & 0xf8; \
358 sG = (Pixel >> 3) & 0xfc; \
359 sB = (Pixel << 3) & 0xf8; \
360 ASSEMBLE_RGBA(dst, 4, dstfmt, sR, sG, sB, alpha); \
368 extrawidth = (
width % 8);
370 vsrc = vec_ld(0,
src);
371 valigner = VEC_ALIGNER(
src);
374 vector
unsigned short vR, vG, vB;
375 vector
unsigned char vdst1, vdst2;
377 voverflow = vec_ld(15,
src);
378 vsrc = vec_perm(vsrc, voverflow, valigner);
380 vR = vec_and((vector
unsigned short) vsrc, vf800);
381 vB = vec_sl((vector
unsigned short) vsrc,
v3);
385 (vector
unsigned char) vec_perm((vector
unsigned char) vR,
387 vdst1 = vec_perm(vdst1, (vector
unsigned char) vB, vblue1);
388 vdst1 = vec_perm(vdst1, (vector
unsigned char) vG, vgreen1);
389 vdst1 = vec_perm(vdst1, valpha, vpermute);
390 vec_st(vdst1, 0,
dst);
393 (vector
unsigned char) vec_perm((vector
unsigned char) vR,
395 vdst2 = vec_perm(vdst2, (vector
unsigned char) vB, vblue2);
396 vdst2 = vec_perm(vdst2, (vector
unsigned char) vG, vgreen2);
397 vdst2 = vec_perm(vdst2, valpha, vpermute);
398 vec_st(vdst2, 16,
dst);
410 ONE_PIXEL_BLEND((extrawidth), extrawidth);
411#undef ONE_PIXEL_BLEND
431 vector
unsigned char valpha;
432 vector
unsigned char vpermute;
433 vector
unsigned short vf800;
434 vector
unsigned int v8 = vec_splat_u32(8);
435 vector
unsigned int v16 = vec_add(v8, v8);
436 vector
unsigned short v1 = vec_splat_u16(1);
437 vector
unsigned short v3 = vec_splat_u16(3);
443 vector
unsigned char vredalpha1 = VECUINT8_LITERAL(0x10, 0x00, 0x01, 0x01,
444 0x10, 0x02, 0x01, 0x01,
445 0x10, 0x04, 0x01, 0x01,
448 vector
unsigned char vredalpha2 =
450 char) (vec_add((vector
unsigned int) vredalpha1, vec_sl(v8, v16))
456 vector
unsigned char vblue1 = VECUINT8_LITERAL(0x00, 0x01, 0x02, 0x11,
457 0x04, 0x05, 0x06, 0x13,
458 0x08, 0x09, 0x0a, 0x15,
459 0x0c, 0x0d, 0x0e, 0x17);
460 vector
unsigned char vblue2 =
461 (vector
unsigned char) (vec_add((vector
unsigned int) vblue1, v8)
467 vector
unsigned char vgreen1 = VECUINT8_LITERAL(0x00, 0x01, 0x10, 0x03,
468 0x04, 0x05, 0x12, 0x07,
469 0x08, 0x09, 0x14, 0x0b,
470 0x0c, 0x0d, 0x16, 0x0f);
471 vector
unsigned char vgreen2 =
473 char) (vec_add((vector
unsigned int) vgreen1, vec_sl(v8, v8))
479 vf800 = (vector
unsigned short) vec_splat_u8(-7);
480 vf800 = vec_sl(vf800, vec_splat_u16(8));
482 if (dstfmt->
Amask && info->
a) {
483 ((
unsigned char *) &valpha)[0] =
alpha = info->
a;
484 valpha = vec_splat(valpha, 0);
487 valpha = vec_splat_u8(0);
490 vpermute = calc_swizzle32(
NULL, dstfmt);
492 vector
unsigned char valigner;
493 vector
unsigned char voverflow;
494 vector
unsigned char vsrc;
500#define ONE_PIXEL_BLEND(condition, widthvar) \
501 while (condition) { \
502 unsigned sR, sG, sB; \
503 unsigned short Pixel = *((unsigned short *)src); \
504 sR = (Pixel >> 7) & 0xf8; \
505 sG = (Pixel >> 2) & 0xf8; \
506 sB = (Pixel << 3) & 0xf8; \
507 ASSEMBLE_RGBA(dst, 4, dstfmt, sR, sG, sB, alpha); \
515 extrawidth = (
width % 8);
517 vsrc = vec_ld(0,
src);
518 valigner = VEC_ALIGNER(
src);
521 vector
unsigned short vR, vG, vB;
522 vector
unsigned char vdst1, vdst2;
524 voverflow = vec_ld(15,
src);
525 vsrc = vec_perm(vsrc, voverflow, valigner);
527 vR = vec_and(vec_sl((vector
unsigned short) vsrc,
v1), vf800);
528 vB = vec_sl((vector
unsigned short) vsrc,
v3);
532 (vector
unsigned char) vec_perm((vector
unsigned char) vR,
534 vdst1 = vec_perm(vdst1, (vector
unsigned char) vB, vblue1);
535 vdst1 = vec_perm(vdst1, (vector
unsigned char) vG, vgreen1);
536 vdst1 = vec_perm(vdst1, valpha, vpermute);
537 vec_st(vdst1, 0,
dst);
540 (vector
unsigned char) vec_perm((vector
unsigned char) vR,
542 vdst2 = vec_perm(vdst2, (vector
unsigned char) vB, vblue2);
543 vdst2 = vec_perm(vdst2, (vector
unsigned char) vG, vgreen2);
544 vdst2 = vec_perm(vdst2, valpha, vpermute);
545 vec_st(vdst2, 16,
dst);
557 ONE_PIXEL_BLEND((extrawidth), extrawidth);
558#undef ONE_PIXEL_BLEND
580 int copy_alpha = (srcfmt->
Amask && dstfmt->
Amask);
584 vector
unsigned int valpha;
585 vector
unsigned char vpermute;
586 vector
unsigned char vzero;
587 vector
unsigned int vckey;
588 vector
unsigned int vrgbmask;
589 vpermute = calc_swizzle32(srcfmt, dstfmt);
590 if (info->
dst_w < 16) {
598 vzero = vec_splat_u8(0);
600 ((
unsigned char *) &valpha)[0] = (
unsigned char)
alpha;
602 (vector
unsigned int) vec_splat((vector
unsigned char) valpha, 0);
604 valpha = (vector
unsigned int) vzero;
607 ((
unsigned int *) (
char *) &vckey)[0] = ckey;
608 vckey = vec_splat(vckey, 0);
609 ((
unsigned int *) (
char *) &vrgbmask)[0] = rgbmask;
610 vrgbmask = vec_splat(vrgbmask, 0);
613#define ONE_PIXEL_BLEND(condition, widthvar) \
615 while (condition) { \
617 unsigned sR, sG, sB, sA; \
618 DISEMBLE_RGBA((Uint8 *)srcp, srcbpp, srcfmt, Pixel, \
620 if ( (Pixel & rgbmask) != ckey ) { \
621 ASSEMBLE_RGBA((Uint8 *)dstp, dstbpp, dstfmt, \
624 dstp = (Uint32 *) (((Uint8 *) dstp) + dstbpp); \
625 srcp = (Uint32 *) (((Uint8 *) srcp) + srcbpp); \
629 while (condition) { \
631 unsigned sR, sG, sB; \
632 RETRIEVE_RGB_PIXEL((Uint8 *)srcp, srcbpp, Pixel); \
633 if ( Pixel != ckey ) { \
634 RGB_FROM_PIXEL(Pixel, srcfmt, sR, sG, sB); \
635 ASSEMBLE_RGBA((Uint8 *)dstp, dstbpp, dstfmt, \
636 sR, sG, sB, alpha); \
638 dstp = (Uint32 *) (((Uint8 *)dstp) + dstbpp); \
639 srcp = (Uint32 *) (((Uint8 *)srcp) + srcbpp); \
644 ONE_PIXEL_BLEND((UNALIGNED_PTR(dstp)) && (
width),
width);
647 int extrawidth = (
width % 4);
648 vector
unsigned char valigner = VEC_ALIGNER(srcp);
649 vector
unsigned int vs = vec_ld(0, srcp);
653 vector
unsigned char vsel;
654 vector
unsigned int vd;
655 vector
unsigned int voverflow = vec_ld(15, srcp);
657 vs = vec_perm(vs, voverflow, valigner);
659 vsel = (vector
unsigned char) vec_and(vs, vrgbmask);
660 vsel = (vector
unsigned char) vec_cmpeq(vs, vckey);
662 vs = vec_perm(vs, valpha, vpermute);
664 vd = vec_ld(0, dstp);
666 vd = (vector
unsigned int) vec_sel((vector
unsigned char) vs,
667 (vector
unsigned char) vd,
676 ONE_PIXEL_BLEND((extrawidth), extrawidth);
677#undef ONE_PIXEL_BLEND
696 vector
unsigned int vzero = vec_splat_u32(0);
697 vector
unsigned char vpermute = calc_swizzle32(srcfmt, dstfmt);
700 vector
unsigned char valpha;
701 ((
unsigned char *) &valpha)[0] = info->
a;
702 vzero = (vector
unsigned int) vec_splat(valpha, 0);
710 vector
unsigned char valigner;
711 vector
unsigned int vbits;
712 vector
unsigned int voverflow;
720 while ((UNALIGNED_PTR(
dst)) && (
width)) {
725 *(
dst++) = MAKE8888(dstfmt,
r,
g,
b,
a);
730 extrawidth = (
width % 4);
732 valigner = VEC_ALIGNER(
src);
733 vbits = vec_ld(0,
src);
736 voverflow = vec_ld(15,
src);
739 vbits = vec_perm(vbits, voverflow, valigner);
740 vbits = vec_perm(vbits, vzero, vpermute);
741 vec_st(vbits, 0,
dst);
754 *(
dst++) = MAKE8888(dstfmt,
r,
g,
b,
a);
769 const int scalar_dst_lead =
sizeof(
Uint32) * 4;
770 const int vector_dst_lead =
sizeof(
Uint32) * 16;
779 vector
unsigned int vzero = vec_splat_u32(0);
780 vector
unsigned char vpermute = calc_swizzle32(srcfmt, dstfmt);
783 vector
unsigned char valpha;
784 ((
unsigned char *) &valpha)[0] = info->
a;
785 vzero = (vector
unsigned int) vec_splat(valpha, 0);
793 vector
unsigned char valigner;
794 vector
unsigned int vbits;
795 vector
unsigned int voverflow;
803 while ((UNALIGNED_PTR(
dst)) && (
width)) {
804 vec_dstt(
src + scalar_dst_lead, DST_CTRL(2, 32, 1024),
806 vec_dstst(
dst + scalar_dst_lead, DST_CTRL(2, 32, 1024),
812 *(
dst++) = MAKE8888(dstfmt,
r,
g,
b,
a);
817 extrawidth = (
width % 4);
819 valigner = VEC_ALIGNER(
src);
820 vbits = vec_ld(0,
src);
823 vec_dstt(
src + vector_dst_lead, DST_CTRL(2, 32, 1024),
825 vec_dstst(
dst + vector_dst_lead, DST_CTRL(2, 32, 1024),
827 voverflow = vec_ld(15,
src);
830 vbits = vec_perm(vbits, voverflow, valigner);
831 vbits = vec_perm(vbits, vzero, vpermute);
832 vec_st(vbits, 0,
dst);
845 *(
dst++) = MAKE8888(dstfmt,
r,
g,
b,
a);
853 vec_dss(DST_CHAN_SRC);
854 vec_dss(DST_CHAN_DEST);
860 static Uint32 features = 0xffffffff;
861 if (features == 0xffffffff) {
863 char *
override =
SDL_getenv(
"SDL_ALTIVEC_BLIT_FEATURES");
875 | ((GetL3CacheSize() == 0) ? 4 : 0)
883#pragma altivec_model off
887#define GetBlitFeatures() ((Uint32)(SDL_HasMMX() ? 1 : 0))
891#if SDL_BYTEORDER == SDL_LIL_ENDIAN
900#define RGB888_RGB332(dst, src) { \
901 dst = (Uint8)((((src)&0x00E00000)>>16)| \
902 (((src)&0x0000E000)>>11)| \
903 (((src)&0x000000C0)>>6)); \
908#ifndef USE_DUFFS_LOOP
915 int srcskip, dstskip;
1010#define RGB101010_RGB332(dst, src) { \
1011 dst = (Uint8)((((src)&0x38000000)>>22)| \
1012 (((src)&0x000E0000)>>15)| \
1013 (((src)&0x00000300)>>8)); \
1018#ifndef USE_DUFFS_LOOP
1025 int srcskip, dstskip;
1038#ifdef USE_DUFFS_LOOP
1055 switch (
width & 3) {
1074#ifdef USE_DUFFS_LOOP
1098 switch (
width & 3) {
1120#define RGB888_RGB555(dst, src) { \
1121 *(Uint16 *)(dst) = (Uint16)((((*src)&0x00F80000)>>9)| \
1122 (((*src)&0x0000F800)>>6)| \
1123 (((*src)&0x000000F8)>>3)); \
1125#ifndef USE_DUFFS_LOOP
1126#define RGB888_RGB555_TWO(dst, src) { \
1127 *(Uint32 *)(dst) = (((((src[HI])&0x00F80000)>>9)| \
1128 (((src[HI])&0x0000F800)>>6)| \
1129 (((src[HI])&0x000000F8)>>3))<<16)| \
1130 (((src[LO])&0x00F80000)>>9)| \
1131 (((src[LO])&0x0000F800)>>6)| \
1132 (((src[LO])&0x000000F8)>>3); \
1138#ifndef USE_DUFFS_LOOP
1144 int srcskip, dstskip;
1154#ifdef USE_DUFFS_LOOP
1168 if ((
long)
dst & 0x03) {
1183 RGB888_RGB555_TWO(
dst,
src);
1186 RGB888_RGB555_TWO(
dst,
src);
1191 switch (
width & 3) {
1197 RGB888_RGB555_TWO(
dst,
src);
1214 RGB888_RGB555_TWO(
dst,
src);
1217 RGB888_RGB555_TWO(
dst,
src);
1222 switch (
width & 3) {
1228 RGB888_RGB555_TWO(
dst,
src);
1246#define RGB888_RGB565(dst, src) { \
1247 *(Uint16 *)(dst) = (Uint16)((((*src)&0x00F80000)>>8)| \
1248 (((*src)&0x0000FC00)>>5)| \
1249 (((*src)&0x000000F8)>>3)); \
1251#ifndef USE_DUFFS_LOOP
1252#define RGB888_RGB565_TWO(dst, src) { \
1253 *(Uint32 *)(dst) = (((((src[HI])&0x00F80000)>>8)| \
1254 (((src[HI])&0x0000FC00)>>5)| \
1255 (((src[HI])&0x000000F8)>>3))<<16)| \
1256 (((src[LO])&0x00F80000)>>8)| \
1257 (((src[LO])&0x0000FC00)>>5)| \
1258 (((src[LO])&0x000000F8)>>3); \
1264#ifndef USE_DUFFS_LOOP
1270 int srcskip, dstskip;
1280#ifdef USE_DUFFS_LOOP
1294 if ((
long)
dst & 0x03) {
1309 RGB888_RGB565_TWO(
dst,
src);
1312 RGB888_RGB565_TWO(
dst,
src);
1317 switch (
width & 3) {
1323 RGB888_RGB565_TWO(
dst,
src);
1340 RGB888_RGB565_TWO(
dst,
src);
1343 RGB888_RGB565_TWO(
dst,
src);
1348 switch (
width & 3) {
1354 RGB888_RGB565_TWO(
dst,
src);
/* Expand one RGB565 pixel, read byte-wise via src[LO]/src[HI] (byte-order
 * dependent indices defined above), to 32 bits using a lookup table laid
 * out as pairs: even entries expand the low byte, odd entries the high
 * byte, and the two halves are summed.  'dst' is unused but kept for
 * signature parity with the other RGB conversion macros.  'src' and 'map'
 * are parenthesized so expression arguments expand correctly. */
#define RGB565_32(dst, src, map) ((map)[(src)[LO]*2] + (map)[(src)[HI]*2+1])
1377#ifndef USE_DUFFS_LOOP
1383 int srcskip, dstskip;
1393#ifdef USE_DUFFS_LOOP
1420 switch (
width & 3) {
1440 0x00000000, 0xff000000, 0x00000008, 0xff002000,
1441 0x00000010, 0xff004000, 0x00000018, 0xff006100,
1442 0x00000020, 0xff008100, 0x00000029, 0xff00a100,
1443 0x00000031, 0xff00c200, 0x00000039, 0xff00e200,
1444 0x00000041, 0xff080000, 0x0000004a, 0xff082000,
1445 0x00000052, 0xff084000, 0x0000005a, 0xff086100,
1446 0x00000062, 0xff088100, 0x0000006a, 0xff08a100,
1447 0x00000073, 0xff08c200, 0x0000007b, 0xff08e200,
1448 0x00000083, 0xff100000, 0x0000008b, 0xff102000,
1449 0x00000094, 0xff104000, 0x0000009c, 0xff106100,
1450 0x000000a4, 0xff108100, 0x000000ac, 0xff10a100,
1451 0x000000b4, 0xff10c200, 0x000000bd, 0xff10e200,
1452 0x000000c5, 0xff180000, 0x000000cd, 0xff182000,
1453 0x000000d5, 0xff184000, 0x000000de, 0xff186100,
1454 0x000000e6, 0xff188100, 0x000000ee, 0xff18a100,
1455 0x000000f6, 0xff18c200, 0x000000ff, 0xff18e200,
1456 0x00000400, 0xff200000, 0x00000408, 0xff202000,
1457 0x00000410, 0xff204000, 0x00000418, 0xff206100,
1458 0x00000420, 0xff208100, 0x00000429, 0xff20a100,
1459 0x00000431, 0xff20c200, 0x00000439, 0xff20e200,
1460 0x00000441, 0xff290000, 0x0000044a, 0xff292000,
1461 0x00000452, 0xff294000, 0x0000045a, 0xff296100,
1462 0x00000462, 0xff298100, 0x0000046a, 0xff29a100,
1463 0x00000473, 0xff29c200, 0x0000047b, 0xff29e200,
1464 0x00000483, 0xff310000, 0x0000048b, 0xff312000,
1465 0x00000494, 0xff314000, 0x0000049c, 0xff316100,
1466 0x000004a4, 0xff318100, 0x000004ac, 0xff31a100,
1467 0x000004b4, 0xff31c200, 0x000004bd, 0xff31e200,
1468 0x000004c5, 0xff390000, 0x000004cd, 0xff392000,
1469 0x000004d5, 0xff394000, 0x000004de, 0xff396100,
1470 0x000004e6, 0xff398100, 0x000004ee, 0xff39a100,
1471 0x000004f6, 0xff39c200, 0x000004ff, 0xff39e200,
1472 0x00000800, 0xff410000, 0x00000808, 0xff412000,
1473 0x00000810, 0xff414000, 0x00000818, 0xff416100,
1474 0x00000820, 0xff418100, 0x00000829, 0xff41a100,
1475 0x00000831, 0xff41c200, 0x00000839, 0xff41e200,
1476 0x00000841, 0xff4a0000, 0x0000084a, 0xff4a2000,
1477 0x00000852, 0xff4a4000, 0x0000085a, 0xff4a6100,
1478 0x00000862, 0xff4a8100, 0x0000086a, 0xff4aa100,
1479 0x00000873, 0xff4ac200, 0x0000087b, 0xff4ae200,
1480 0x00000883, 0xff520000, 0x0000088b, 0xff522000,
1481 0x00000894, 0xff524000, 0x0000089c, 0xff526100,
1482 0x000008a4, 0xff528100, 0x000008ac, 0xff52a100,
1483 0x000008b4, 0xff52c200, 0x000008bd, 0xff52e200,
1484 0x000008c5, 0xff5a0000, 0x000008cd, 0xff5a2000,
1485 0x000008d5, 0xff5a4000, 0x000008de, 0xff5a6100,
1486 0x000008e6, 0xff5a8100, 0x000008ee, 0xff5aa100,
1487 0x000008f6, 0xff5ac200, 0x000008ff, 0xff5ae200,
1488 0x00000c00, 0xff620000, 0x00000c08, 0xff622000,
1489 0x00000c10, 0xff624000, 0x00000c18, 0xff626100,
1490 0x00000c20, 0xff628100, 0x00000c29, 0xff62a100,
1491 0x00000c31, 0xff62c200, 0x00000c39, 0xff62e200,
1492 0x00000c41, 0xff6a0000, 0x00000c4a, 0xff6a2000,
1493 0x00000c52, 0xff6a4000, 0x00000c5a, 0xff6a6100,
1494 0x00000c62, 0xff6a8100, 0x00000c6a, 0xff6aa100,
1495 0x00000c73, 0xff6ac200, 0x00000c7b, 0xff6ae200,
1496 0x00000c83, 0xff730000, 0x00000c8b, 0xff732000,
1497 0x00000c94, 0xff734000, 0x00000c9c, 0xff736100,
1498 0x00000ca4, 0xff738100, 0x00000cac, 0xff73a100,
1499 0x00000cb4, 0xff73c200, 0x00000cbd, 0xff73e200,
1500 0x00000cc5, 0xff7b0000, 0x00000ccd, 0xff7b2000,
1501 0x00000cd5, 0xff7b4000, 0x00000cde, 0xff7b6100,
1502 0x00000ce6, 0xff7b8100, 0x00000cee, 0xff7ba100,
1503 0x00000cf6, 0xff7bc200, 0x00000cff, 0xff7be200,
1504 0x00001000, 0xff830000, 0x00001008, 0xff832000,
1505 0x00001010, 0xff834000, 0x00001018, 0xff836100,
1506 0x00001020, 0xff838100, 0x00001029, 0xff83a100,
1507 0x00001031, 0xff83c200, 0x00001039, 0xff83e200,
1508 0x00001041, 0xff8b0000, 0x0000104a, 0xff8b2000,
1509 0x00001052, 0xff8b4000, 0x0000105a, 0xff8b6100,
1510 0x00001062, 0xff8b8100, 0x0000106a, 0xff8ba100,
1511 0x00001073, 0xff8bc200, 0x0000107b, 0xff8be200,
1512 0x00001083, 0xff940000, 0x0000108b, 0xff942000,
1513 0x00001094, 0xff944000, 0x0000109c, 0xff946100,
1514 0x000010a4, 0xff948100, 0x000010ac, 0xff94a100,
1515 0x000010b4, 0xff94c200, 0x000010bd, 0xff94e200,
1516 0x000010c5, 0xff9c0000, 0x000010cd, 0xff9c2000,
1517 0x000010d5, 0xff9c4000, 0x000010de, 0xff9c6100,
1518 0x000010e6, 0xff9c8100, 0x000010ee, 0xff9ca100,
1519 0x000010f6, 0xff9cc200, 0x000010ff, 0xff9ce200,
1520 0x00001400, 0xffa40000, 0x00001408, 0xffa42000,
1521 0x00001410, 0xffa44000, 0x00001418, 0xffa46100,
1522 0x00001420, 0xffa48100, 0x00001429, 0xffa4a100,
1523 0x00001431, 0xffa4c200, 0x00001439, 0xffa4e200,
1524 0x00001441, 0xffac0000, 0x0000144a, 0xffac2000,
1525 0x00001452, 0xffac4000, 0x0000145a, 0xffac6100,
1526 0x00001462, 0xffac8100, 0x0000146a, 0xffaca100,
1527 0x00001473, 0xffacc200, 0x0000147b, 0xfface200,
1528 0x00001483, 0xffb40000, 0x0000148b, 0xffb42000,
1529 0x00001494, 0xffb44000, 0x0000149c, 0xffb46100,
1530 0x000014a4, 0xffb48100, 0x000014ac, 0xffb4a100,
1531 0x000014b4, 0xffb4c200, 0x000014bd, 0xffb4e200,
1532 0x000014c5, 0xffbd0000, 0x000014cd, 0xffbd2000,
1533 0x000014d5, 0xffbd4000, 0x000014de, 0xffbd6100,
1534 0x000014e6, 0xffbd8100, 0x000014ee, 0xffbda100,
1535 0x000014f6, 0xffbdc200, 0x000014ff, 0xffbde200,
1536 0x00001800, 0xffc50000, 0x00001808, 0xffc52000,
1537 0x00001810, 0xffc54000, 0x00001818, 0xffc56100,
1538 0x00001820, 0xffc58100, 0x00001829, 0xffc5a100,
1539 0x00001831, 0xffc5c200, 0x00001839, 0xffc5e200,
1540 0x00001841, 0xffcd0000, 0x0000184a, 0xffcd2000,
1541 0x00001852, 0xffcd4000, 0x0000185a, 0xffcd6100,
1542 0x00001862, 0xffcd8100, 0x0000186a, 0xffcda100,
1543 0x00001873, 0xffcdc200, 0x0000187b, 0xffcde200,
1544 0x00001883, 0xffd50000, 0x0000188b, 0xffd52000,
1545 0x00001894, 0xffd54000, 0x0000189c, 0xffd56100,
1546 0x000018a4, 0xffd58100, 0x000018ac, 0xffd5a100,
1547 0x000018b4, 0xffd5c200, 0x000018bd, 0xffd5e200,
1548 0x000018c5, 0xffde0000, 0x000018cd, 0xffde2000,
1549 0x000018d5, 0xffde4000, 0x000018de, 0xffde6100,
1550 0x000018e6, 0xffde8100, 0x000018ee, 0xffdea100,
1551 0x000018f6, 0xffdec200, 0x000018ff, 0xffdee200,
1552 0x00001c00, 0xffe60000, 0x00001c08, 0xffe62000,
1553 0x00001c10, 0xffe64000, 0x00001c18, 0xffe66100,
1554 0x00001c20, 0xffe68100, 0x00001c29, 0xffe6a100,
1555 0x00001c31, 0xffe6c200, 0x00001c39, 0xffe6e200,
1556 0x00001c41, 0xffee0000, 0x00001c4a, 0xffee2000,
1557 0x00001c52, 0xffee4000, 0x00001c5a, 0xffee6100,
1558 0x00001c62, 0xffee8100, 0x00001c6a, 0xffeea100,
1559 0x00001c73, 0xffeec200, 0x00001c7b, 0xffeee200,
1560 0x00001c83, 0xfff60000, 0x00001c8b, 0xfff62000,
1561 0x00001c94, 0xfff64000, 0x00001c9c, 0xfff66100,
1562 0x00001ca4, 0xfff68100, 0x00001cac, 0xfff6a100,
1563 0x00001cb4, 0xfff6c200, 0x00001cbd, 0xfff6e200,
1564 0x00001cc5, 0xffff0000, 0x00001ccd, 0xffff2000,
1565 0x00001cd5, 0xffff4000, 0x00001cde, 0xffff6100,
1566 0x00001ce6, 0xffff8100, 0x00001cee, 0xffffa100,
1567 0x00001cf6, 0xffffc200, 0x00001cff, 0xffffe200
1578 0xff000000, 0x00000000, 0xff080000, 0x00002000,
1579 0xff100000, 0x00004000, 0xff180000, 0x00006100,
1580 0xff200000, 0x00008100, 0xff290000, 0x0000a100,
1581 0xff310000, 0x0000c200, 0xff390000, 0x0000e200,
1582 0xff410000, 0x00000008, 0xff4a0000, 0x00002008,
1583 0xff520000, 0x00004008, 0xff5a0000, 0x00006108,
1584 0xff620000, 0x00008108, 0xff6a0000, 0x0000a108,
1585 0xff730000, 0x0000c208, 0xff7b0000, 0x0000e208,
1586 0xff830000, 0x00000010, 0xff8b0000, 0x00002010,
1587 0xff940000, 0x00004010, 0xff9c0000, 0x00006110,
1588 0xffa40000, 0x00008110, 0xffac0000, 0x0000a110,
1589 0xffb40000, 0x0000c210, 0xffbd0000, 0x0000e210,
1590 0xffc50000, 0x00000018, 0xffcd0000, 0x00002018,
1591 0xffd50000, 0x00004018, 0xffde0000, 0x00006118,
1592 0xffe60000, 0x00008118, 0xffee0000, 0x0000a118,
1593 0xfff60000, 0x0000c218, 0xffff0000, 0x0000e218,
1594 0xff000400, 0x00000020, 0xff080400, 0x00002020,
1595 0xff100400, 0x00004020, 0xff180400, 0x00006120,
1596 0xff200400, 0x00008120, 0xff290400, 0x0000a120,
1597 0xff310400, 0x0000c220, 0xff390400, 0x0000e220,
1598 0xff410400, 0x00000029, 0xff4a0400, 0x00002029,
1599 0xff520400, 0x00004029, 0xff5a0400, 0x00006129,
1600 0xff620400, 0x00008129, 0xff6a0400, 0x0000a129,
1601 0xff730400, 0x0000c229, 0xff7b0400, 0x0000e229,
1602 0xff830400, 0x00000031, 0xff8b0400, 0x00002031,
1603 0xff940400, 0x00004031, 0xff9c0400, 0x00006131,
1604 0xffa40400, 0x00008131, 0xffac0400, 0x0000a131,
1605 0xffb40400, 0x0000c231, 0xffbd0400, 0x0000e231,
1606 0xffc50400, 0x00000039, 0xffcd0400, 0x00002039,
1607 0xffd50400, 0x00004039, 0xffde0400, 0x00006139,
1608 0xffe60400, 0x00008139, 0xffee0400, 0x0000a139,
1609 0xfff60400, 0x0000c239, 0xffff0400, 0x0000e239,
1610 0xff000800, 0x00000041, 0xff080800, 0x00002041,
1611 0xff100800, 0x00004041, 0xff180800, 0x00006141,
1612 0xff200800, 0x00008141, 0xff290800, 0x0000a141,
1613 0xff310800, 0x0000c241, 0xff390800, 0x0000e241,
1614 0xff410800, 0x0000004a, 0xff4a0800, 0x0000204a,
1615 0xff520800, 0x0000404a, 0xff5a0800, 0x0000614a,
1616 0xff620800, 0x0000814a, 0xff6a0800, 0x0000a14a,
1617 0xff730800, 0x0000c24a, 0xff7b0800, 0x0000e24a,
1618 0xff830800, 0x00000052, 0xff8b0800, 0x00002052,
1619 0xff940800, 0x00004052, 0xff9c0800, 0x00006152,
1620 0xffa40800, 0x00008152, 0xffac0800, 0x0000a152,
1621 0xffb40800, 0x0000c252, 0xffbd0800, 0x0000e252,
1622 0xffc50800, 0x0000005a, 0xffcd0800, 0x0000205a,
1623 0xffd50800, 0x0000405a, 0xffde0800, 0x0000615a,
1624 0xffe60800, 0x0000815a, 0xffee0800, 0x0000a15a,
1625 0xfff60800, 0x0000c25a, 0xffff0800, 0x0000e25a,
1626 0xff000c00, 0x00000062, 0xff080c00, 0x00002062,
1627 0xff100c00, 0x00004062, 0xff180c00, 0x00006162,
1628 0xff200c00, 0x00008162, 0xff290c00, 0x0000a162,
1629 0xff310c00, 0x0000c262, 0xff390c00, 0x0000e262,
1630 0xff410c00, 0x0000006a, 0xff4a0c00, 0x0000206a,
1631 0xff520c00, 0x0000406a, 0xff5a0c00, 0x0000616a,
1632 0xff620c00, 0x0000816a, 0xff6a0c00, 0x0000a16a,
1633 0xff730c00, 0x0000c26a, 0xff7b0c00, 0x0000e26a,
1634 0xff830c00, 0x00000073, 0xff8b0c00, 0x00002073,
1635 0xff940c00, 0x00004073, 0xff9c0c00, 0x00006173,
1636 0xffa40c00, 0x00008173, 0xffac0c00, 0x0000a173,
1637 0xffb40c00, 0x0000c273, 0xffbd0c00, 0x0000e273,
1638 0xffc50c00, 0x0000007b, 0xffcd0c00, 0x0000207b,
1639 0xffd50c00, 0x0000407b, 0xffde0c00, 0x0000617b,
1640 0xffe60c00, 0x0000817b, 0xffee0c00, 0x0000a17b,
1641 0xfff60c00, 0x0000c27b, 0xffff0c00, 0x0000e27b,
1642 0xff001000, 0x00000083, 0xff081000, 0x00002083,
1643 0xff101000, 0x00004083, 0xff181000, 0x00006183,
1644 0xff201000, 0x00008183, 0xff291000, 0x0000a183,
1645 0xff311000, 0x0000c283, 0xff391000, 0x0000e283,
1646 0xff411000, 0x0000008b, 0xff4a1000, 0x0000208b,
1647 0xff521000, 0x0000408b, 0xff5a1000, 0x0000618b,
1648 0xff621000, 0x0000818b, 0xff6a1000, 0x0000a18b,
1649 0xff731000, 0x0000c28b, 0xff7b1000, 0x0000e28b,
1650 0xff831000, 0x00000094, 0xff8b1000, 0x00002094,
1651 0xff941000, 0x00004094, 0xff9c1000, 0x00006194,
1652 0xffa41000, 0x00008194, 0xffac1000, 0x0000a194,
1653 0xffb41000, 0x0000c294, 0xffbd1000, 0x0000e294,
1654 0xffc51000, 0x0000009c, 0xffcd1000, 0x0000209c,
1655 0xffd51000, 0x0000409c, 0xffde1000, 0x0000619c,
1656 0xffe61000, 0x0000819c, 0xffee1000, 0x0000a19c,
1657 0xfff61000, 0x0000c29c, 0xffff1000, 0x0000e29c,
1658 0xff001400, 0x000000a4, 0xff081400, 0x000020a4,
1659 0xff101400, 0x000040a4, 0xff181400, 0x000061a4,
1660 0xff201400, 0x000081a4, 0xff291400, 0x0000a1a4,
1661 0xff311400, 0x0000c2a4, 0xff391400, 0x0000e2a4,
1662 0xff411400, 0x000000ac, 0xff4a1400, 0x000020ac,
1663 0xff521400, 0x000040ac, 0xff5a1400, 0x000061ac,
1664 0xff621400, 0x000081ac, 0xff6a1400, 0x0000a1ac,
1665 0xff731400, 0x0000c2ac, 0xff7b1400, 0x0000e2ac,
1666 0xff831400, 0x000000b4, 0xff8b1400, 0x000020b4,
1667 0xff941400, 0x000040b4, 0xff9c1400, 0x000061b4,
1668 0xffa41400, 0x000081b4, 0xffac1400, 0x0000a1b4,
1669 0xffb41400, 0x0000c2b4, 0xffbd1400, 0x0000e2b4,
1670 0xffc51400, 0x000000bd, 0xffcd1400, 0x000020bd,
1671 0xffd51400, 0x000040bd, 0xffde1400, 0x000061bd,
1672 0xffe61400, 0x000081bd, 0xffee1400, 0x0000a1bd,
1673 0xfff61400, 0x0000c2bd, 0xffff1400, 0x0000e2bd,
1674 0xff001800, 0x000000c5, 0xff081800, 0x000020c5,
1675 0xff101800, 0x000040c5, 0xff181800, 0x000061c5,
1676 0xff201800, 0x000081c5, 0xff291800, 0x0000a1c5,
1677 0xff311800, 0x0000c2c5, 0xff391800, 0x0000e2c5,
1678 0xff411800, 0x000000cd, 0xff4a1800, 0x000020cd,
1679 0xff521800, 0x000040cd, 0xff5a1800, 0x000061cd,
1680 0xff621800, 0x000081cd, 0xff6a1800, 0x0000a1cd,
1681 0xff731800, 0x0000c2cd, 0xff7b1800, 0x0000e2cd,
1682 0xff831800, 0x000000d5, 0xff8b1800, 0x000020d5,
1683 0xff941800, 0x000040d5, 0xff9c1800, 0x000061d5,
1684 0xffa41800, 0x000081d5, 0xffac1800, 0x0000a1d5,
1685 0xffb41800, 0x0000c2d5, 0xffbd1800, 0x0000e2d5,
1686 0xffc51800, 0x000000de, 0xffcd1800, 0x000020de,
1687 0xffd51800, 0x000040de, 0xffde1800, 0x000061de,
1688 0xffe61800, 0x000081de, 0xffee1800, 0x0000a1de,
1689 0xfff61800, 0x0000c2de, 0xffff1800, 0x0000e2de,
1690 0xff001c00, 0x000000e6, 0xff081c00, 0x000020e6,
1691 0xff101c00, 0x000040e6, 0xff181c00, 0x000061e6,
1692 0xff201c00, 0x000081e6, 0xff291c00, 0x0000a1e6,
1693 0xff311c00, 0x0000c2e6, 0xff391c00, 0x0000e2e6,
1694 0xff411c00, 0x000000ee, 0xff4a1c00, 0x000020ee,
1695 0xff521c00, 0x000040ee, 0xff5a1c00, 0x000061ee,
1696 0xff621c00, 0x000081ee, 0xff6a1c00, 0x0000a1ee,
1697 0xff731c00, 0x0000c2ee, 0xff7b1c00, 0x0000e2ee,
1698 0xff831c00, 0x000000f6, 0xff8b1c00, 0x000020f6,
1699 0xff941c00, 0x000040f6, 0xff9c1c00, 0x000061f6,
1700 0xffa41c00, 0x000081f6, 0xffac1c00, 0x0000a1f6,
1701 0xffb41c00, 0x0000c2f6, 0xffbd1c00, 0x0000e2f6,
1702 0xffc51c00, 0x000000ff, 0xffcd1c00, 0x000020ff,
1703 0xffd51c00, 0x000040ff, 0xffde1c00, 0x000061ff,
1704 0xffe61c00, 0x000081ff, 0xffee1c00, 0x0000a1ff,
1705 0xfff61c00, 0x0000c2ff, 0xffff1c00, 0x0000e2ff
1716 0x000000ff, 0x00000000, 0x000008ff, 0x00200000,
1717 0x000010ff, 0x00400000, 0x000018ff, 0x00610000,
1718 0x000020ff, 0x00810000, 0x000029ff, 0x00a10000,
1719 0x000031ff, 0x00c20000, 0x000039ff, 0x00e20000,
1720 0x000041ff, 0x08000000, 0x00004aff, 0x08200000,
1721 0x000052ff, 0x08400000, 0x00005aff, 0x08610000,
1722 0x000062ff, 0x08810000, 0x00006aff, 0x08a10000,
1723 0x000073ff, 0x08c20000, 0x00007bff, 0x08e20000,
1724 0x000083ff, 0x10000000, 0x00008bff, 0x10200000,
1725 0x000094ff, 0x10400000, 0x00009cff, 0x10610000,
1726 0x0000a4ff, 0x10810000, 0x0000acff, 0x10a10000,
1727 0x0000b4ff, 0x10c20000, 0x0000bdff, 0x10e20000,
1728 0x0000c5ff, 0x18000000, 0x0000cdff, 0x18200000,
1729 0x0000d5ff, 0x18400000, 0x0000deff, 0x18610000,
1730 0x0000e6ff, 0x18810000, 0x0000eeff, 0x18a10000,
1731 0x0000f6ff, 0x18c20000, 0x0000ffff, 0x18e20000,
1732 0x000400ff, 0x20000000, 0x000408ff, 0x20200000,
1733 0x000410ff, 0x20400000, 0x000418ff, 0x20610000,
1734 0x000420ff, 0x20810000, 0x000429ff, 0x20a10000,
1735 0x000431ff, 0x20c20000, 0x000439ff, 0x20e20000,
1736 0x000441ff, 0x29000000, 0x00044aff, 0x29200000,
1737 0x000452ff, 0x29400000, 0x00045aff, 0x29610000,
1738 0x000462ff, 0x29810000, 0x00046aff, 0x29a10000,
1739 0x000473ff, 0x29c20000, 0x00047bff, 0x29e20000,
1740 0x000483ff, 0x31000000, 0x00048bff, 0x31200000,
1741 0x000494ff, 0x31400000, 0x00049cff, 0x31610000,
1742 0x0004a4ff, 0x31810000, 0x0004acff, 0x31a10000,
1743 0x0004b4ff, 0x31c20000, 0x0004bdff, 0x31e20000,
1744 0x0004c5ff, 0x39000000, 0x0004cdff, 0x39200000,
1745 0x0004d5ff, 0x39400000, 0x0004deff, 0x39610000,
1746 0x0004e6ff, 0x39810000, 0x0004eeff, 0x39a10000,
1747 0x0004f6ff, 0x39c20000, 0x0004ffff, 0x39e20000,
1748 0x000800ff, 0x41000000, 0x000808ff, 0x41200000,
1749 0x000810ff, 0x41400000, 0x000818ff, 0x41610000,
1750 0x000820ff, 0x41810000, 0x000829ff, 0x41a10000,
1751 0x000831ff, 0x41c20000, 0x000839ff, 0x41e20000,
1752 0x000841ff, 0x4a000000, 0x00084aff, 0x4a200000,
1753 0x000852ff, 0x4a400000, 0x00085aff, 0x4a610000,
1754 0x000862ff, 0x4a810000, 0x00086aff, 0x4aa10000,
1755 0x000873ff, 0x4ac20000, 0x00087bff, 0x4ae20000,
1756 0x000883ff, 0x52000000, 0x00088bff, 0x52200000,
1757 0x000894ff, 0x52400000, 0x00089cff, 0x52610000,
1758 0x0008a4ff, 0x52810000, 0x0008acff, 0x52a10000,
1759 0x0008b4ff, 0x52c20000, 0x0008bdff, 0x52e20000,
1760 0x0008c5ff, 0x5a000000, 0x0008cdff, 0x5a200000,
1761 0x0008d5ff, 0x5a400000, 0x0008deff, 0x5a610000,
1762 0x0008e6ff, 0x5a810000, 0x0008eeff, 0x5aa10000,
1763 0x0008f6ff, 0x5ac20000, 0x0008ffff, 0x5ae20000,
1764 0x000c00ff, 0x62000000, 0x000c08ff, 0x62200000,
1765 0x000c10ff, 0x62400000, 0x000c18ff, 0x62610000,
1766 0x000c20ff, 0x62810000, 0x000c29ff, 0x62a10000,
1767 0x000c31ff, 0x62c20000, 0x000c39ff, 0x62e20000,
1768 0x000c41ff, 0x6a000000, 0x000c4aff, 0x6a200000,
1769 0x000c52ff, 0x6a400000, 0x000c5aff, 0x6a610000,
1770 0x000c62ff, 0x6a810000, 0x000c6aff, 0x6aa10000,
1771 0x000c73ff, 0x6ac20000, 0x000c7bff, 0x6ae20000,
1772 0x000c83ff, 0x73000000, 0x000c8bff, 0x73200000,
1773 0x000c94ff, 0x73400000, 0x000c9cff, 0x73610000,
1774 0x000ca4ff, 0x73810000, 0x000cacff, 0x73a10000,
1775 0x000cb4ff, 0x73c20000, 0x000cbdff, 0x73e20000,
1776 0x000cc5ff, 0x7b000000, 0x000ccdff, 0x7b200000,
1777 0x000cd5ff, 0x7b400000, 0x000cdeff, 0x7b610000,
1778 0x000ce6ff, 0x7b810000, 0x000ceeff, 0x7ba10000,
1779 0x000cf6ff, 0x7bc20000, 0x000cffff, 0x7be20000,
1780 0x001000ff, 0x83000000, 0x001008ff, 0x83200000,
1781 0x001010ff, 0x83400000, 0x001018ff, 0x83610000,
1782 0x001020ff, 0x83810000, 0x001029ff, 0x83a10000,
1783 0x001031ff, 0x83c20000, 0x001039ff, 0x83e20000,
1784 0x001041ff, 0x8b000000, 0x00104aff, 0x8b200000,
1785 0x001052ff, 0x8b400000, 0x00105aff, 0x8b610000,
1786 0x001062ff, 0x8b810000, 0x00106aff, 0x8ba10000,
1787 0x001073ff, 0x8bc20000, 0x00107bff, 0x8be20000,
1788 0x001083ff, 0x94000000, 0x00108bff, 0x94200000,
1789 0x001094ff, 0x94400000, 0x00109cff, 0x94610000,
1790 0x0010a4ff, 0x94810000, 0x0010acff, 0x94a10000,
1791 0x0010b4ff, 0x94c20000, 0x0010bdff, 0x94e20000,
1792 0x0010c5ff, 0x9c000000, 0x0010cdff, 0x9c200000,
1793 0x0010d5ff, 0x9c400000, 0x0010deff, 0x9c610000,
1794 0x0010e6ff, 0x9c810000, 0x0010eeff, 0x9ca10000,
1795 0x0010f6ff, 0x9cc20000, 0x0010ffff, 0x9ce20000,
1796 0x001400ff, 0xa4000000, 0x001408ff, 0xa4200000,
1797 0x001410ff, 0xa4400000, 0x001418ff, 0xa4610000,
1798 0x001420ff, 0xa4810000, 0x001429ff, 0xa4a10000,
1799 0x001431ff, 0xa4c20000, 0x001439ff, 0xa4e20000,
1800 0x001441ff, 0xac000000, 0x00144aff, 0xac200000,
1801 0x001452ff, 0xac400000, 0x00145aff, 0xac610000,
1802 0x001462ff, 0xac810000, 0x00146aff, 0xaca10000,
1803 0x001473ff, 0xacc20000, 0x00147bff, 0xace20000,
1804 0x001483ff, 0xb4000000, 0x00148bff, 0xb4200000,
1805 0x001494ff, 0xb4400000, 0x00149cff, 0xb4610000,
1806 0x0014a4ff, 0xb4810000, 0x0014acff, 0xb4a10000,
1807 0x0014b4ff, 0xb4c20000, 0x0014bdff, 0xb4e20000,
1808 0x0014c5ff, 0xbd000000, 0x0014cdff, 0xbd200000,
1809 0x0014d5ff, 0xbd400000, 0x0014deff, 0xbd610000,
1810 0x0014e6ff, 0xbd810000, 0x0014eeff, 0xbda10000,
1811 0x0014f6ff, 0xbdc20000, 0x0014ffff, 0xbde20000,
1812 0x001800ff, 0xc5000000, 0x001808ff, 0xc5200000,
1813 0x001810ff, 0xc5400000, 0x001818ff, 0xc5610000,
1814 0x001820ff, 0xc5810000, 0x001829ff, 0xc5a10000,
1815 0x001831ff, 0xc5c20000, 0x001839ff, 0xc5e20000,
1816 0x001841ff, 0xcd000000, 0x00184aff, 0xcd200000,
1817 0x001852ff, 0xcd400000, 0x00185aff, 0xcd610000,
1818 0x001862ff, 0xcd810000, 0x00186aff, 0xcda10000,
1819 0x001873ff, 0xcdc20000, 0x00187bff, 0xcde20000,
1820 0x001883ff, 0xd5000000, 0x00188bff, 0xd5200000,
1821 0x001894ff, 0xd5400000, 0x00189cff, 0xd5610000,
1822 0x0018a4ff, 0xd5810000, 0x0018acff, 0xd5a10000,
1823 0x0018b4ff, 0xd5c20000, 0x0018bdff, 0xd5e20000,
1824 0x0018c5ff, 0xde000000, 0x0018cdff, 0xde200000,
1825 0x0018d5ff, 0xde400000, 0x0018deff, 0xde610000,
1826 0x0018e6ff, 0xde810000, 0x0018eeff, 0xdea10000,
1827 0x0018f6ff, 0xdec20000, 0x0018ffff, 0xdee20000,
1828 0x001c00ff, 0xe6000000, 0x001c08ff, 0xe6200000,
1829 0x001c10ff, 0xe6400000, 0x001c18ff, 0xe6610000,
1830 0x001c20ff, 0xe6810000, 0x001c29ff, 0xe6a10000,
1831 0x001c31ff, 0xe6c20000, 0x001c39ff, 0xe6e20000,
1832 0x001c41ff, 0xee000000, 0x001c4aff, 0xee200000,
1833 0x001c52ff, 0xee400000, 0x001c5aff, 0xee610000,
1834 0x001c62ff, 0xee810000, 0x001c6aff, 0xeea10000,
1835 0x001c73ff, 0xeec20000, 0x001c7bff, 0xeee20000,
1836 0x001c83ff, 0xf6000000, 0x001c8bff, 0xf6200000,
1837 0x001c94ff, 0xf6400000, 0x001c9cff, 0xf6610000,
1838 0x001ca4ff, 0xf6810000, 0x001cacff, 0xf6a10000,
1839 0x001cb4ff, 0xf6c20000, 0x001cbdff, 0xf6e20000,
1840 0x001cc5ff, 0xff000000, 0x001ccdff, 0xff200000,
1841 0x001cd5ff, 0xff400000, 0x001cdeff, 0xff610000,
1842 0x001ce6ff, 0xff810000, 0x001ceeff, 0xffa10000,
1843 0x001cf6ff, 0xffc20000, 0x001cffff, 0xffe20000,
1854 0x00000000, 0x000000ff, 0x08000000, 0x002000ff,
1855 0x10000000, 0x004000ff, 0x18000000, 0x006100ff,
1856 0x20000000, 0x008100ff, 0x29000000, 0x00a100ff,
1857 0x31000000, 0x00c200ff, 0x39000000, 0x00e200ff,
1858 0x41000000, 0x000008ff, 0x4a000000, 0x002008ff,
1859 0x52000000, 0x004008ff, 0x5a000000, 0x006108ff,
1860 0x62000000, 0x008108ff, 0x6a000000, 0x00a108ff,
1861 0x73000000, 0x00c208ff, 0x7b000000, 0x00e208ff,
1862 0x83000000, 0x000010ff, 0x8b000000, 0x002010ff,
1863 0x94000000, 0x004010ff, 0x9c000000, 0x006110ff,
1864 0xa4000000, 0x008110ff, 0xac000000, 0x00a110ff,
1865 0xb4000000, 0x00c210ff, 0xbd000000, 0x00e210ff,
1866 0xc5000000, 0x000018ff, 0xcd000000, 0x002018ff,
1867 0xd5000000, 0x004018ff, 0xde000000, 0x006118ff,
1868 0xe6000000, 0x008118ff, 0xee000000, 0x00a118ff,
1869 0xf6000000, 0x00c218ff, 0xff000000, 0x00e218ff,
1870 0x00040000, 0x000020ff, 0x08040000, 0x002020ff,
1871 0x10040000, 0x004020ff, 0x18040000, 0x006120ff,
1872 0x20040000, 0x008120ff, 0x29040000, 0x00a120ff,
1873 0x31040000, 0x00c220ff, 0x39040000, 0x00e220ff,
1874 0x41040000, 0x000029ff, 0x4a040000, 0x002029ff,
1875 0x52040000, 0x004029ff, 0x5a040000, 0x006129ff,
1876 0x62040000, 0x008129ff, 0x6a040000, 0x00a129ff,
1877 0x73040000, 0x00c229ff, 0x7b040000, 0x00e229ff,
1878 0x83040000, 0x000031ff, 0x8b040000, 0x002031ff,
1879 0x94040000, 0x004031ff, 0x9c040000, 0x006131ff,
1880 0xa4040000, 0x008131ff, 0xac040000, 0x00a131ff,
1881 0xb4040000, 0x00c231ff, 0xbd040000, 0x00e231ff,
1882 0xc5040000, 0x000039ff, 0xcd040000, 0x002039ff,
1883 0xd5040000, 0x004039ff, 0xde040000, 0x006139ff,
1884 0xe6040000, 0x008139ff, 0xee040000, 0x00a139ff,
1885 0xf6040000, 0x00c239ff, 0xff040000, 0x00e239ff,
1886 0x00080000, 0x000041ff, 0x08080000, 0x002041ff,
1887 0x10080000, 0x004041ff, 0x18080000, 0x006141ff,
1888 0x20080000, 0x008141ff, 0x29080000, 0x00a141ff,
1889 0x31080000, 0x00c241ff, 0x39080000, 0x00e241ff,
1890 0x41080000, 0x00004aff, 0x4a080000, 0x00204aff,
1891 0x52080000, 0x00404aff, 0x5a080000, 0x00614aff,
1892 0x62080000, 0x00814aff, 0x6a080000, 0x00a14aff,
1893 0x73080000, 0x00c24aff, 0x7b080000, 0x00e24aff,
1894 0x83080000, 0x000052ff, 0x8b080000, 0x002052ff,
1895 0x94080000, 0x004052ff, 0x9c080000, 0x006152ff,
1896 0xa4080000, 0x008152ff, 0xac080000, 0x00a152ff,
1897 0xb4080000, 0x00c252ff, 0xbd080000, 0x00e252ff,
1898 0xc5080000, 0x00005aff, 0xcd080000, 0x00205aff,
1899 0xd5080000, 0x00405aff, 0xde080000, 0x00615aff,
1900 0xe6080000, 0x00815aff, 0xee080000, 0x00a15aff,
1901 0xf6080000, 0x00c25aff, 0xff080000, 0x00e25aff,
1902 0x000c0000, 0x000062ff, 0x080c0000, 0x002062ff,
1903 0x100c0000, 0x004062ff, 0x180c0000, 0x006162ff,
1904 0x200c0000, 0x008162ff, 0x290c0000, 0x00a162ff,
1905 0x310c0000, 0x00c262ff, 0x390c0000, 0x00e262ff,
1906 0x410c0000, 0x00006aff, 0x4a0c0000, 0x00206aff,
1907 0x520c0000, 0x00406aff, 0x5a0c0000, 0x00616aff,
1908 0x620c0000, 0x00816aff, 0x6a0c0000, 0x00a16aff,
1909 0x730c0000, 0x00c26aff, 0x7b0c0000, 0x00e26aff,
1910 0x830c0000, 0x000073ff, 0x8b0c0000, 0x002073ff,
1911 0x940c0000, 0x004073ff, 0x9c0c0000, 0x006173ff,
1912 0xa40c0000, 0x008173ff, 0xac0c0000, 0x00a173ff,
1913 0xb40c0000, 0x00c273ff, 0xbd0c0000, 0x00e273ff,
1914 0xc50c0000, 0x00007bff, 0xcd0c0000, 0x00207bff,
1915 0xd50c0000, 0x00407bff, 0xde0c0000, 0x00617bff,
1916 0xe60c0000, 0x00817bff, 0xee0c0000, 0x00a17bff,
1917 0xf60c0000, 0x00c27bff, 0xff0c0000, 0x00e27bff,
1918 0x00100000, 0x000083ff, 0x08100000, 0x002083ff,
1919 0x10100000, 0x004083ff, 0x18100000, 0x006183ff,
1920 0x20100000, 0x008183ff, 0x29100000, 0x00a183ff,
1921 0x31100000, 0x00c283ff, 0x39100000, 0x00e283ff,
1922 0x41100000, 0x00008bff, 0x4a100000, 0x00208bff,
1923 0x52100000, 0x00408bff, 0x5a100000, 0x00618bff,
1924 0x62100000, 0x00818bff, 0x6a100000, 0x00a18bff,
1925 0x73100000, 0x00c28bff, 0x7b100000, 0x00e28bff,
1926 0x83100000, 0x000094ff, 0x8b100000, 0x002094ff,
1927 0x94100000, 0x004094ff, 0x9c100000, 0x006194ff,
1928 0xa4100000, 0x008194ff, 0xac100000, 0x00a194ff,
1929 0xb4100000, 0x00c294ff, 0xbd100000, 0x00e294ff,
1930 0xc5100000, 0x00009cff, 0xcd100000, 0x00209cff,
1931 0xd5100000, 0x00409cff, 0xde100000, 0x00619cff,
1932 0xe6100000, 0x00819cff, 0xee100000, 0x00a19cff,
1933 0xf6100000, 0x00c29cff, 0xff100000, 0x00e29cff,
1934 0x00140000, 0x0000a4ff, 0x08140000, 0x0020a4ff,
1935 0x10140000, 0x0040a4ff, 0x18140000, 0x0061a4ff,
1936 0x20140000, 0x0081a4ff, 0x29140000, 0x00a1a4ff,
1937 0x31140000, 0x00c2a4ff, 0x39140000, 0x00e2a4ff,
1938 0x41140000, 0x0000acff, 0x4a140000, 0x0020acff,
1939 0x52140000, 0x0040acff, 0x5a140000, 0x0061acff,
1940 0x62140000, 0x0081acff, 0x6a140000, 0x00a1acff,
1941 0x73140000, 0x00c2acff, 0x7b140000, 0x00e2acff,
1942 0x83140000, 0x0000b4ff, 0x8b140000, 0x0020b4ff,
1943 0x94140000, 0x0040b4ff, 0x9c140000, 0x0061b4ff,
1944 0xa4140000, 0x0081b4ff, 0xac140000, 0x00a1b4ff,
1945 0xb4140000, 0x00c2b4ff, 0xbd140000, 0x00e2b4ff,
1946 0xc5140000, 0x0000bdff, 0xcd140000, 0x0020bdff,
1947 0xd5140000, 0x0040bdff, 0xde140000, 0x0061bdff,
1948 0xe6140000, 0x0081bdff, 0xee140000, 0x00a1bdff,
1949 0xf6140000, 0x00c2bdff, 0xff140000, 0x00e2bdff,
1950 0x00180000, 0x0000c5ff, 0x08180000, 0x0020c5ff,
1951 0x10180000, 0x0040c5ff, 0x18180000, 0x0061c5ff,
1952 0x20180000, 0x0081c5ff, 0x29180000, 0x00a1c5ff,
1953 0x31180000, 0x00c2c5ff, 0x39180000, 0x00e2c5ff,
1954 0x41180000, 0x0000cdff, 0x4a180000, 0x0020cdff,
1955 0x52180000, 0x0040cdff, 0x5a180000, 0x0061cdff,
1956 0x62180000, 0x0081cdff, 0x6a180000, 0x00a1cdff,
1957 0x73180000, 0x00c2cdff, 0x7b180000, 0x00e2cdff,
1958 0x83180000, 0x0000d5ff, 0x8b180000, 0x0020d5ff,
1959 0x94180000, 0x0040d5ff, 0x9c180000, 0x0061d5ff,
1960 0xa4180000, 0x0081d5ff, 0xac180000, 0x00a1d5ff,
1961 0xb4180000, 0x00c2d5ff, 0xbd180000, 0x00e2d5ff,
1962 0xc5180000, 0x0000deff, 0xcd180000, 0x0020deff,
1963 0xd5180000, 0x0040deff, 0xde180000, 0x0061deff,
1964 0xe6180000, 0x0081deff, 0xee180000, 0x00a1deff,
1965 0xf6180000, 0x00c2deff, 0xff180000, 0x00e2deff,
1966 0x001c0000, 0x0000e6ff, 0x081c0000, 0x0020e6ff,
1967 0x101c0000, 0x0040e6ff, 0x181c0000, 0x0061e6ff,
1968 0x201c0000, 0x0081e6ff, 0x291c0000, 0x00a1e6ff,
1969 0x311c0000, 0x00c2e6ff, 0x391c0000, 0x00e2e6ff,
1970 0x411c0000, 0x0000eeff, 0x4a1c0000, 0x0020eeff,
1971 0x521c0000, 0x0040eeff, 0x5a1c0000, 0x0061eeff,
1972 0x621c0000, 0x0081eeff, 0x6a1c0000, 0x00a1eeff,
1973 0x731c0000, 0x00c2eeff, 0x7b1c0000, 0x00e2eeff,
1974 0x831c0000, 0x0000f6ff, 0x8b1c0000, 0x0020f6ff,
1975 0x941c0000, 0x0040f6ff, 0x9c1c0000, 0x0061f6ff,
1976 0xa41c0000, 0x0081f6ff, 0xac1c0000, 0x00a1f6ff,
1977 0xb41c0000, 0x00c2f6ff, 0xbd1c0000, 0x00e2f6ff,
1978 0xc51c0000, 0x0000ffff, 0xcd1c0000, 0x0020ffff,
1979 0xd51c0000, 0x0040ffff, 0xde1c0000, 0x0061ffff,
1980 0xe61c0000, 0x0081ffff, 0xee1c0000, 0x00a1ffff,
1981 0xf61c0000, 0x00c2ffff, 0xff1c0000, 0x00e2ffff
1993#ifndef USE_DUFFS_LOOP
2000 int srcskip, dstskip;
2019#ifdef USE_DUFFS_LOOP
2026 *
dst = ((sR>>5)<<(3+2))|
2039 *
dst = ((sR >> 5) << (3 + 2)) |
2040 ((sG >> 5) << (2)) | ((sB >> 6) << (0));
2051#ifdef USE_DUFFS_LOOP
2058 *
dst =
map[((sR>>5)<<(3+2))|
2071 *
dst =
map[((sR >> 5) << (3 + 2)) |
2072 ((sG >> 5) << (2)) | ((sB >> 6) << (0))];
2097 if (dstfmt->
Amask) {
2164 int *_p0 ,
int *_p1,
int *_p2,
int *_p3,
int *_alpha_channel)
2166 int alpha_channel = 0, p0, p1, p2, p3;
2167#if SDL_BYTEORDER == SDL_LIL_ENDIAN
2168 int Pixel = 0x04030201;
2170 int Pixel = 0x01020304;
2175 if (srcfmt->
Amask) {
2182 if (dstfmt->
Amask) {
2183 if (srcfmt->
Amask) {
2192#if SDL_BYTEORDER == SDL_LIL_ENDIAN
2194 p1 = (Pixel >> 8) & 0xFF;
2195 p2 = (Pixel >> 16) & 0xFF;
2196 p3 = (Pixel >> 24) & 0xFF;
2199 p2 = (Pixel >> 8) & 0xFF;
2200 p1 = (Pixel >> 16) & 0xFF;
2201 p0 = (Pixel >> 24) & 0xFF;
2207 }
else if (p1 == 0) {
2210 }
else if (p2 == 0) {
2213 }
else if (p3 == 0) {
2218#if SDL_BYTEORDER == SDL_LIL_ENDIAN
2220 if (srcbpp == 3 && dstbpp == 4) {
2225 }
else if (srcbpp == 4 && dstbpp == 3) {
2236 if (_alpha_channel) {
2237 *_alpha_channel = alpha_channel;
2258#if HAVE_FAST_WRITE_INT8
2260 if (srcbpp == 4 && dstbpp == 4 &&
2265 int alpha_channel, p0, p1, p2, p3;
2289 if (srcbpp == 4 && dstbpp == 3 &&
2313#if HAVE_FAST_WRITE_INT8
2315 if (srcbpp == 3 && dstbpp == 4 &&
2319 int alpha_channel, p0, p1, p2, p3;
2377#if HAVE_FAST_WRITE_INT8
2379 if (srcbpp == 4 && dstbpp == 4 &&
2409 unsigned sR, sG, sB, sA;
2432 Uint32 rgbmask = ~srcfmt->Amask;
2435 unsigned sR, sG, sB;
2441 if (palmap ==
NULL) {
2448 if ( (Pixel & rgbmask) != ckey ) {
2469 if ( (Pixel & rgbmask) != ckey ) {
2471 *
dst = (
Uint8)palmap[((sR>>5)<<(3+2))|
2496 Uint32 rgbmask = ~info->src_fmt->Amask;
2507 if ( (*srcp & rgbmask) != ckey ) {
2535 Uint32 rgbmask = ~srcfmt->Amask;
2536 int sfmt = srcfmt->
format;
2537 int dfmt = dstfmt->
format;
2547 if (dstfmt->
Amask) {
2554 if ((*src32 & rgbmask) != ckey) {
2555 *dst32 = *src32 |
mask;
2572 if ((*src32 & rgbmask) != ckey) {
2573 *dst32 = *src32 &
mask;
2586#if HAVE_FAST_WRITE_INT8
2588 if (srcbpp == 4 && dstbpp == 4 &&
2593 int alpha_channel, p0, p1, p2, p3;
2602 if ((*src32 & rgbmask) != ckey) {
2624#if SDL_BYTEORDER == SDL_LIL_ENDIAN
2625 Uint8 k0 = ckey & 0xFF;
2626 Uint8 k1 = (ckey >> 8) & 0xFF;
2627 Uint8 k2 = (ckey >> 16) & 0xFF;
2629 Uint8 k0 = (ckey >> 16) & 0xFF;
2630 Uint8 k1 = (ckey >> 8) & 0xFF;
2631 Uint8 k2 = ckey & 0xFF;
2642 if (k0 !=
s0 || k1 !=
s1 || k2 != s2) {
2662#if SDL_BYTEORDER == SDL_LIL_ENDIAN
2663 Uint8 k0 = ckey & 0xFF;
2664 Uint8 k1 = (ckey >> 8) & 0xFF;
2665 Uint8 k2 = (ckey >> 16) & 0xFF;
2667 Uint8 k0 = (ckey >> 16) & 0xFF;
2668 Uint8 k1 = (ckey >> 8) & 0xFF;
2669 Uint8 k2 = ckey & 0xFF;
2679 if (k0 !=
s0 || k1 !=
s1 || k2 != s2) {
2697 if (srcbpp == 4 && dstbpp == 3 &&
2709 if ((*src32 & rgbmask) != ckey) {
2724#if HAVE_FAST_WRITE_INT8
2726 if (srcbpp == 3 && dstbpp == 4 &&
2729#if SDL_BYTEORDER == SDL_LIL_ENDIAN
2730 Uint8 k0 = ckey & 0xFF;
2731 Uint8 k1 = (ckey >> 8) & 0xFF;
2732 Uint8 k2 = (ckey >> 16) & 0xFF;
2734 Uint8 k0 = (ckey >> 16) & 0xFF;
2735 Uint8 k1 = (ckey >> 8) & 0xFF;
2736 Uint8 k2 = ckey & 0xFF;
2740 int alpha_channel, p0, p1, p2, p3;
2751 if (k0 !=
s0 || k1 !=
s1 || k2 != s2) {
2778 if ( (Pixel & rgbmask) != ckey ) {
2809 unsigned sR, sG, sB, sA;
2830 if ((*src32 & rgbmask) != ckey) {
2845#if HAVE_FAST_WRITE_INT8
2847 if (srcbpp == 4 && dstbpp == 4 &&
2860 if ((*src32 & rgbmask) != ckey) {
2882 if ( (Pixel & rgbmask) != ckey ) {
2908 unsigned sR, sG, sB, sA;
2940 unsigned sR, sG, sB, sA;
2974 if (dstfmt->
Amask) {
2977#if SDL_BYTEORDER == SDL_LIL_ENDIAN
2978 int i0 = 0,
i1 = 1,
i2 = 2;
2980 int i0 = srcbpp - 1 - 0;
2981 int i1 = srcbpp - 1 - 1;
2982 int i2 = srcbpp - 1 - 2;
2992 *dst32 = (
s0) | (
s1 << 8) | (s2 << 16) |
mask;
3002#if SDL_BYTEORDER == SDL_LIL_ENDIAN
3003 int i0 = 0,
i1 = 1,
i2 = 2;
3004 int j0 = 0,
j1 = 1, j2 = 2;
3006 int i0 = srcbpp - 1 - 0;
3007 int i1 = srcbpp - 1 - 1;
3008 int i2 = srcbpp - 1 - 2;
3009 int j0 = dstbpp - 1 - 0;
3010 int j1 = dstbpp - 1 - 1;
3011 int j2 = dstbpp - 1 - 2;
3049 if (dstfmt->
Amask) {
3050 if (srcfmt->
Amask) {
3054#if SDL_BYTEORDER == SDL_LIL_ENDIAN
3055 int i0 = 0,
i1 = 1,
i2 = 2, i3 = 3;
3057 int i0 = 3,
i1 = 2,
i2 = 1, i3 = 0;
3068 *dst32 = (
s0 << 16) | (
s1 << 8) | (s2) | alphashift;
3079#if SDL_BYTEORDER == SDL_LIL_ENDIAN
3080 int i0 = 0,
i1 = 1,
i2 = 2;
3082 int i0 = srcbpp - 1 - 0;
3083 int i1 = srcbpp - 1 - 1;
3084 int i2 = srcbpp - 1 - 2;
3095 *dst32 = (
s0 << 16) | (
s1 << 8) | (s2) |
mask;
3106#if SDL_BYTEORDER == SDL_LIL_ENDIAN
3107 int i0 = 0,
i1 = 1,
i2 = 2;
3108 int j0 = 2,
j1 = 1, j2 = 0;
3110 int i0 = srcbpp - 1 - 0;
3111 int i1 = srcbpp - 1 - 1;
3112 int i2 = srcbpp - 1 - 2;
3113 int j0 = dstbpp - 1 - 2;
3114 int j1 = dstbpp - 1 - 1;
3115 int j2 = dstbpp - 1 - 0;
3154 {0, 0, 0, 0, 0, 0, 0, 0,
BlitNtoN, 0}
3158#if SDL_ALTIVEC_BLITTERS
3160 {0x0000F800, 0x000007E0, 0x0000001F, 4, 0x00000000, 0x00000000, 0x00000000,
3162 {0x00007C00, 0x000003E0, 0x0000001F, 4, 0x00000000, 0x00000000, 0x00000000,
3165 {0x0000F800, 0x000007E0, 0x0000001F, 4, 0x00FF0000, 0x0000FF00, 0x000000FF,
3167 {0x0000F800, 0x000007E0, 0x0000001F, 4, 0x000000FF, 0x0000FF00, 0x00FF0000,
3169 {0x0000F800, 0x000007E0, 0x0000001F, 4, 0xFF000000, 0x00FF0000, 0x0000FF00,
3171 {0x0000F800, 0x000007E0, 0x0000001F, 4, 0x0000FF00, 0x00FF0000, 0xFF000000,
3175 {0, 0, 0, 0, 0, 0, 0, 0,
BlitNtoN, 0}
3180 {0x000000FF, 0x0000FF00, 0x00FF0000, 4, 0x000000FF, 0x0000FF00, 0x00FF0000,
3182#if HAVE_FAST_WRITE_INT8
3186 {0x00FF0000, 0x0000FF00, 0x000000FF, 4, 0x00FF0000, 0x0000FF00, 0x000000FF,
3188#if HAVE_FAST_WRITE_INT8
3193 {0x000000FF, 0x0000FF00, 0x00FF0000, 4, 0x00FF0000, 0x0000FF00, 0x000000FF,
3195#if HAVE_FAST_WRITE_INT8
3199 {0x00FF0000, 0x0000FF00, 0x000000FF, 4, 0x000000FF, 0x0000FF00, 0x00FF0000,
3201#if HAVE_FAST_WRITE_INT8
3206 {0x000000FF, 0x0000FF00, 0x00FF0000, 3, 0x00FF0000, 0x0000FF00, 0x000000FF,
3208 {0x00FF0000, 0x0000FF00, 0x000000FF, 3, 0x000000FF, 0x0000FF00, 0x00FF0000,
3211 {0, 0, 0, 0, 0, 0, 0, 0,
BlitNtoN, 0}
3215#if SDL_ALTIVEC_BLITTERS
3217 {0x00000000, 0x00000000, 0x00000000, 4, 0x00000000, 0x00000000, 0x00000000,
3220 {0x00000000, 0x00000000, 0x00000000, 4, 0x00000000, 0x00000000, 0x00000000,
3223 {0x00000000, 0x00000000, 0x00000000, 2, 0x0000F800, 0x000007E0, 0x0000001F,
3224 2, Blit_RGB888_RGB565Altivec,
NO_ALPHA},
3227 {0x000000FF, 0x0000FF00, 0x00FF0000, 3, 0x000000FF, 0x0000FF00, 0x00FF0000,
3229 {0x00FF0000, 0x0000FF00, 0x000000FF, 3, 0x00FF0000, 0x0000FF00, 0x000000FF,
3232 {0x000000FF, 0x0000FF00, 0x00FF0000, 3, 0x00FF0000, 0x0000FF00, 0x000000FF,
3234 {0x00FF0000, 0x0000FF00, 0x000000FF, 3, 0x000000FF, 0x0000FF00, 0x00FF0000,
3237 {0x000000FF, 0x0000FF00, 0x00FF0000, 4, 0x00FF0000, 0x0000FF00, 0x000000FF,
3239#if HAVE_FAST_WRITE_INT8
3243 {0x00FF0000, 0x0000FF00, 0x000000FF, 4, 0x000000FF, 0x0000FF00, 0x00FF0000,
3245#if HAVE_FAST_WRITE_INT8
3250 {0x00FF0000, 0x0000FF00, 0x000000FF, 2, 0x0000F800, 0x000007E0, 0x0000001F,
3252 {0x00FF0000, 0x0000FF00, 0x000000FF, 2, 0x00007C00, 0x000003E0, 0x0000001F,
3255 {0, 0, 0, 0, 0, 0, 0, 0,
BlitNtoN, 0}
3263#define MASKOK(x, y) (((x) == (y)) || ((y) == 0x00000000))
3288 (srcfmt->
Rmask == 0x00FF0000) &&
3289 (srcfmt->
Gmask == 0x0000FF00) &&
3290 (srcfmt->
Bmask == 0x000000FF)) {
3293 (srcfmt->
Rmask == 0x3FF00000) &&
3294 (srcfmt->
Gmask == 0x000FFC00) &&
3295 (srcfmt->
Bmask == 0x000003FF)) {
3306 for (which = 0;
table[which].dstbpp; ++which) {
3314 (a_need &
table[which].alpha) == a_need &&
3359#if SDL_ALTIVEC_BLITTERS
3362 return Blit32to32KeyAltivec;
#define SDL_assert(condition)
#define ASSEMBLE_RGBA(buf, bpp, fmt, r, g, b, a)
#define DISEMBLE_RGB(buf, bpp, fmt, Pixel, r, g, b)
#define RGBA_FROM_8888(Pixel, fmt, r, g, b, a)
#define RETRIEVE_RGB_PIXEL(buf, bpp, Pixel)
#define SDL_COPY_RLE_MASK
#define RGB_FROM_PIXEL(Pixel, fmt, r, g, b)
#define DISEMBLE_RGBA(buf, bpp, fmt, Pixel, r, g, b, a)
#define ARGB2101010_FROM_RGBA(Pixel, r, g, b, a)
#define PIXEL_FROM_RGB(Pixel, fmt, r, g, b)
#define DUFFS_LOOP(pixel_copy_increment, width)
#define RGBA_FROM_PIXEL(Pixel, fmt, r, g, b, a)
#define SDL_COPY_COLORKEY
void(* SDL_BlitFunc)(SDL_BlitInfo *info)
#define RGBA_FROM_ARGB2101010(Pixel, r, g, b, a)
#define PIXEL_FROM_RGBA(Pixel, fmt, r, g, b, a)
static void BlitNtoN(SDL_BlitInfo *info)
#define RGB888_RGB555(dst, src)
#define RGB565_32(dst, src, map)
static void Blit_RGB565_ABGR8888(SDL_BlitInfo *info)
static void BlitNto1Key(SDL_BlitInfo *info)
static void Blit2to2Key(SDL_BlitInfo *info)
static void Blit_3or4_to_3or4__inversed_rgb(SDL_BlitInfo *info)
static void BlitNto1(SDL_BlitInfo *info)
SDL_BlitFunc SDL_CalculateBlitN(SDL_Surface *surface)
static void Blit_RGB888_RGB565(SDL_BlitInfo *info)
static void Blit4to4CopyAlpha(SDL_BlitInfo *info)
static const struct blit_table normal_blit_3[]
#define RGB888_RGB332(dst, src)
#define RGB101010_RGB332(dst, src)
#define RGB888_RGB565(dst, src)
static const Uint32 RGB565_ARGB8888_LUT[512]
static void Blit_RGB565_BGRA8888(SDL_BlitInfo *info)
static void Blit_RGB565_RGBA8888(SDL_BlitInfo *info)
static void Blit_3or4_to_3or4__same_rgb(SDL_BlitInfo *info)
static void Blit_RGB565_ARGB8888(SDL_BlitInfo *info)
#define GetBlitFeatures()
static void Blit_RGB888_RGB555(SDL_BlitInfo *info)
static void Blit4to4MaskAlpha(SDL_BlitInfo *info)
static void get_permutation(SDL_PixelFormat *srcfmt, SDL_PixelFormat *dstfmt, int *_p0, int *_p1, int *_p2, int *_p3, int *_alpha_channel)
static const struct blit_table normal_blit_1[]
static const struct blit_table normal_blit_2[]
static void Blit_RGB565_32(SDL_BlitInfo *info, const Uint32 *map)
static void Blit_RGB888_index8(SDL_BlitInfo *info)
static void Blit_RGB101010_index8(SDL_BlitInfo *info)
static const Uint32 RGB565_RGBA8888_LUT[512]
static const struct blit_table *const normal_blit[]
static void BlitNtoNKey(SDL_BlitInfo *info)
static const struct blit_table normal_blit_4[]
static void BlitNtoNCopyAlpha(SDL_BlitInfo *info)
static void BlitNto2101010(SDL_BlitInfo *info)
static void BlitNtoNKeyCopyAlpha(SDL_BlitInfo *info)
static const Uint32 RGB565_BGRA8888_LUT[512]
static const Uint32 RGB565_ABGR8888_LUT[512]
static void Blit2101010toN(SDL_BlitInfo *info)
const GLubyte GLuint GLuint GLuint GLuint alpha GLboolean GLboolean GLboolean GLboolean alpha GLint GLint GLsizei GLsizei GLenum type GLenum GLint GLenum GLint GLint GLsizei GLsizei GLint border GLenum GLint GLint GLint GLint GLint GLsizei GLsizei height GLsizei GLsizei GLenum GLenum const GLvoid *pixels GLenum GLint GLint GLint GLint j2 GLdouble GLdouble GLdouble GLdouble GLdouble GLdouble zFar GLenum GLenum GLint *params GLenum GLenum GLint *params GLenum GLenum GLint *params GLenum GLenum GLfloat *params GLenum GLint GLenum GLenum GLvoid *pixels GLenum GLint GLenum GLint *params GLenum GLenum GLint *params GLenum GLsizei const GLvoid *pointer GLenum GLenum const GLint *params GLenum GLfloat GLfloat GLint GLint const GLfloat *points GLenum GLfloat GLfloat GLint GLint GLfloat GLfloat GLint GLint const GLfloat *points GLint GLfloat GLfloat GLint GLfloat GLfloat v2 GLenum GLenum const GLint *params GLdouble GLdouble GLdouble GLdouble GLdouble GLdouble zFar GLenum map
const GLubyte GLuint GLuint GLuint GLuint alpha GLboolean GLboolean GLboolean GLboolean alpha GLint GLint GLsizei GLsizei GLenum type GLenum GLint GLenum GLint GLint GLsizei GLsizei GLint border GLenum GLint GLint GLint GLint GLint GLsizei GLsizei height GLsizei GLsizei GLenum GLenum const GLvoid *pixels GLenum GLint i1
const GLubyte GLuint GLuint GLuint GLuint alpha GLboolean GLboolean GLboolean GLboolean alpha GLint GLint GLsizei GLsizei GLenum type GLenum GLint GLenum GLint GLint GLsizei GLsizei GLint border GLenum GLint GLint GLint GLint GLint GLsizei GLsizei height GLsizei GLsizei GLenum GLenum const GLvoid *pixels GLenum GLint GLint GLint j1
const GLubyte GLuint GLuint GLuint GLuint alpha GLboolean GLboolean GLboolean GLboolean alpha GLint GLint GLsizei GLsizei GLenum type GLenum GLint GLenum GLint GLint GLsizei GLsizei GLint border GLenum GLint GLint GLint GLint GLint GLsizei GLsizei height GLsizei GLsizei GLenum GLenum const GLvoid *pixels GLenum GLint GLint i2
GLint GLint GLsizei width
GLdouble GLdouble GLdouble r
GLint GLint GLsizei GLsizei height
GLboolean GLboolean GLboolean b
GLuint GLfloat GLfloat GLfloat GLfloat GLfloat GLfloat GLfloat GLfloat s1
GLboolean GLboolean GLboolean GLboolean a
GLenum GLint GLenum GLsizei GLsizei GLsizei GLint GLsizei const void * bits
GLfloat GLfloat GLfloat alpha
GLfloat GLfloat GLfloat GLfloat v3
GLfloat GLfloat GLfloat v2
GLenum GLsizei GLenum GLenum const void * table
GLuint GLfloat GLfloat GLfloat GLfloat GLfloat GLfloat s0
@ SDL_PIXELFORMAT_RGBA8888
@ SDL_PIXELFORMAT_ABGR8888
@ SDL_PIXELFORMAT_BGRA8888
@ SDL_PIXELFORMAT_ARGB8888
@ SDL_PIXELFORMAT_ARGB2101010
SDL_PixelFormat * src_fmt
SDL_PixelFormat * dst_fmt
A collection of pixels used in software blitting.