21 #include "../SDL_internal.h"
33 #define HAVE_FAST_WRITE_INT8 1
37 # undef HAVE_FAST_WRITE_INT8
38 # define HAVE_FAST_WRITE_INT8 0
51 #if SDL_ALTIVEC_BLITTERS
56 #include <sys/sysctl.h>
60 const char key[] =
"hw.l3cachesize";
62 size_t typeSize =
sizeof(
result);
80 #if (defined(__MACOSX__) && (__GNUC__ < 4))
81 #define VECUINT8_LITERAL(a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p) \
82 (vector unsigned char) ( a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p )
83 #define VECUINT16_LITERAL(a,b,c,d,e,f,g,h) \
84 (vector unsigned short) ( a,b,c,d,e,f,g,h )
86 #define VECUINT8_LITERAL(a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p) \
87 (vector unsigned char) { a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p }
88 #define VECUINT16_LITERAL(a,b,c,d,e,f,g,h) \
89 (vector unsigned short) { a,b,c,d,e,f,g,h }
/* Nonzero when the given pointer/address is not 16-byte aligned (the
 * AltiVec vector alignment); evaluates to the low four address bits.
 * The argument is parenthesized so pointer-arithmetic arguments such
 * as UNALIGNED_PTR(p + 1) are cast as a whole, not just `p`. */
#define UNALIGNED_PTR(x) (((size_t)(x)) & 0x0000000F)
/* Build a vec_perm permute vector that selects byte offsets a,b,c,d
 * out of each of the four 32-bit words of a source vector.
 * NOTE(review): this uses the parenthesized vector-literal form, which
 * matches the (__MACOSX__ && __GNUC__ < 4) branch of VECUINT8_LITERAL
 * above — confirm it is only compiled under toolchains accepting it. */
93 #define VSWIZZLE32(a,b,c,d) (vector unsigned char) \
94 ( 0x00+a, 0x00+b, 0x00+c, 0x00+d, \
95 0x04+a, 0x04+b, 0x04+c, 0x04+d, \
96 0x08+a, 0x08+b, 0x08+c, 0x08+d, \
97 0x0C+a, 0x0C+b, 0x0C+c, 0x0C+d )
/* Assemble one 32-bit pixel from 8-bit r/g/b/a channel values using the
 * destination format's per-channel shifts and masks.  All arguments are
 * parenthesized so that expression arguments (e.g. `x | y`) expand with
 * the intended precedence — the unparenthesized form silently mis-binds
 * around `<<` and `|`. */
#define MAKE8888(dstfmt, r, g, b, a) \
    ( (((r) << (dstfmt)->Rshift) & (dstfmt)->Rmask) | \
      (((g) << (dstfmt)->Gshift) & (dstfmt)->Gmask) | \
      (((b) << (dstfmt)->Bshift) & (dstfmt)->Bmask) | \
      (((a) << (dstfmt)->Ashift) & (dstfmt)->Amask) )
/* AltiVec data-stream (vec_dst*) prefetch channel numbers: one channel
 * for reading source pixels, one for writing the destination. */
111 #define DST_CHAN_SRC 1
112 #define DST_CHAN_DEST 2
/* Pack a vec_dst control word from block size, block count and stride.
 * NOTE(review): field placement (size<<24 | count<<16 | stride) should
 * match the AltiVec PIM data-stream control-word layout — confirm. */
115 #define DST_CTRL(size, count, stride) \
116 (((size) << 24) | ((count) << 16) | (stride))
118 #define VEC_ALIGNER(src) ((UNALIGNED_PTR(src)) \
120 : vec_add(vec_lvsl(8, src), vec_splat_u8(8)))
123 static vector
unsigned char
135 0x00FF0000, 0x0000FF00, 0x000000FF, 0xFF000000,
140 const vector
unsigned char plus = VECUINT8_LITERAL(0x00, 0x00, 0x00, 0x00,
141 0x04, 0x04, 0x04, 0x04,
142 0x08, 0x08, 0x08, 0x08,
145 vector
unsigned char vswiz;
146 vector
unsigned int srcvec;
147 Uint32 rmask, gmask, bmask, amask;
150 srcfmt = &default_pixel_format;
153 dstfmt = &default_pixel_format;
156 #define RESHIFT(X) (3 - ((X) >> 3))
164 ((srcfmt->
Amask) ? RESHIFT(srcfmt->
173 ((
unsigned int *) (
char *) &srcvec)[0] = (rmask | gmask | bmask | amask);
174 vswiz = vec_add(plus, (vector
unsigned char) vec_splat(srcvec, 0));
178 #if defined(__powerpc__) && (SDL_BYTEORDER == SDL_LIL_ENDIAN)
180 static vector
unsigned char reorder_ppc64le_vec(vector
unsigned char vpermute)
194 const vector
unsigned char ppc64le_reorder = VECUINT8_LITERAL(
195 0x01, 0x00, 0x03, 0x02,
196 0x05, 0x04, 0x07, 0x06,
197 0x09, 0x08, 0x0B, 0x0A,
198 0x0D, 0x0C, 0x0F, 0x0E );
200 vector
unsigned char vswiz_ppc64le;
201 vswiz_ppc64le = vec_perm(vpermute, vpermute, ppc64le_reorder);
202 return(vswiz_ppc64le);
216 vector
unsigned char valpha = vec_splat_u8(0);
217 vector
unsigned char vpermute = calc_swizzle32(srcfmt,
NULL);
218 vector
unsigned char vgmerge = VECUINT8_LITERAL(0x00, 0x02, 0x00, 0x06,
219 0x00, 0x0a, 0x00, 0x0e,
220 0x00, 0x12, 0x00, 0x16,
221 0x00, 0x1a, 0x00, 0x1e);
222 vector
unsigned short v1 = vec_splat_u16(1);
223 vector
unsigned short v3 = vec_splat_u16(3);
224 vector
unsigned short v3f =
225 VECUINT16_LITERAL(0x003f, 0x003f, 0x003f, 0x003f,
226 0x003f, 0x003f, 0x003f, 0x003f);
227 vector
unsigned short vfc =
228 VECUINT16_LITERAL(0x00fc, 0x00fc, 0x00fc, 0x00fc,
229 0x00fc, 0x00fc, 0x00fc, 0x00fc);
230 vector
unsigned short vf800 = (vector
unsigned short) vec_splat_u8(-7);
231 vf800 = vec_sl(vf800, vec_splat_u16(8));
234 vector
unsigned char valigner;
235 vector
unsigned char voverflow;
236 vector
unsigned char vsrc;
242 #define ONE_PIXEL_BLEND(condition, widthvar) \
243 while (condition) { \
245 unsigned sR, sG, sB, sA; \
246 DISEMBLE_RGBA((Uint8 *)src, 4, srcfmt, Pixel, \
248 *(Uint16 *)(dst) = (((sR << 8) & 0x0000F800) | \
249 ((sG << 3) & 0x000007E0) | \
250 ((sB >> 3) & 0x0000001F)); \
259 extrawidth = (
width % 8);
261 vsrc = vec_ld(0,
src);
262 valigner = VEC_ALIGNER(
src);
265 vector
unsigned short vpixel, vrpixel, vgpixel, vbpixel;
266 vector
unsigned int vsrc1, vsrc2;
267 vector
unsigned char vdst;
269 voverflow = vec_ld(15,
src);
270 vsrc = vec_perm(vsrc, voverflow, valigner);
271 vsrc1 = (vector
unsigned int) vec_perm(vsrc, valpha, vpermute);
274 voverflow = vec_ld(15,
src);
275 vsrc = vec_perm(vsrc, voverflow, valigner);
276 vsrc2 = (vector
unsigned int) vec_perm(vsrc, valpha, vpermute);
278 vpixel = (vector
unsigned short) vec_packpx(vsrc1, vsrc2);
279 vgpixel = (vector
unsigned short) vec_perm(vsrc1, vsrc2, vgmerge);
280 vgpixel = vec_and(vgpixel, vfc);
281 vgpixel = vec_sl(vgpixel,
v3);
282 vrpixel = vec_sl(vpixel,
v1);
283 vrpixel = vec_and(vrpixel, vf800);
284 vbpixel = vec_and(vpixel, v3f);
286 vec_or((vector
unsigned char) vrpixel,
287 (vector
unsigned char) vgpixel);
289 vdst = vec_or(vdst, (vector
unsigned char) vbpixel);
290 vec_st(vdst, 0,
dst);
301 ONE_PIXEL_BLEND((extrawidth), extrawidth);
302 #undef ONE_PIXEL_BLEND
322 vector
unsigned char valpha;
323 vector
unsigned char vpermute;
324 vector
unsigned short vf800;
325 vector
unsigned int v8 = vec_splat_u32(8);
326 vector
unsigned int v16 = vec_add(v8, v8);
327 vector
unsigned short v2 = vec_splat_u16(2);
328 vector
unsigned short v3 = vec_splat_u16(3);
334 vector
unsigned char vredalpha1 = VECUINT8_LITERAL(0x10, 0x00, 0x01, 0x01,
335 0x10, 0x02, 0x01, 0x01,
336 0x10, 0x04, 0x01, 0x01,
339 vector
unsigned char vredalpha2 =
341 char) (vec_add((vector
unsigned int) vredalpha1, vec_sl(v8, v16))
347 vector
unsigned char vblue1 = VECUINT8_LITERAL(0x00, 0x01, 0x02, 0x11,
348 0x04, 0x05, 0x06, 0x13,
349 0x08, 0x09, 0x0a, 0x15,
350 0x0c, 0x0d, 0x0e, 0x17);
351 vector
unsigned char vblue2 =
352 (vector
unsigned char) (vec_add((vector
unsigned int) vblue1, v8)
358 vector
unsigned char vgreen1 = VECUINT8_LITERAL(0x00, 0x01, 0x10, 0x03,
359 0x04, 0x05, 0x12, 0x07,
360 0x08, 0x09, 0x14, 0x0b,
361 0x0c, 0x0d, 0x16, 0x0f);
362 vector
unsigned char vgreen2 =
364 char) (vec_add((vector
unsigned int) vgreen1, vec_sl(v8, v8))
370 vf800 = (vector
unsigned short) vec_splat_u8(-7);
371 vf800 = vec_sl(vf800, vec_splat_u16(8));
373 if (dstfmt->
Amask && info->
a) {
374 ((
unsigned char *) &valpha)[0] =
alpha = info->
a;
375 valpha = vec_splat(valpha, 0);
378 valpha = vec_splat_u8(0);
381 vpermute = calc_swizzle32(
NULL, dstfmt);
383 vector
unsigned char valigner;
384 vector
unsigned char voverflow;
385 vector
unsigned char vsrc;
391 #define ONE_PIXEL_BLEND(condition, widthvar) \
392 while (condition) { \
393 unsigned sR, sG, sB; \
394 unsigned short Pixel = *((unsigned short *)src); \
395 sR = (Pixel >> 8) & 0xf8; \
396 sG = (Pixel >> 3) & 0xfc; \
397 sB = (Pixel << 3) & 0xf8; \
398 ASSEMBLE_RGBA(dst, 4, dstfmt, sR, sG, sB, alpha); \
406 extrawidth = (
width % 8);
408 vsrc = vec_ld(0,
src);
409 valigner = VEC_ALIGNER(
src);
412 vector
unsigned short vR, vG, vB;
413 vector
unsigned char vdst1, vdst2;
415 voverflow = vec_ld(15,
src);
416 vsrc = vec_perm(vsrc, voverflow, valigner);
418 vR = vec_and((vector
unsigned short) vsrc, vf800);
419 vB = vec_sl((vector
unsigned short) vsrc,
v3);
423 (vector
unsigned char) vec_perm((vector
unsigned char) vR,
425 vdst1 = vec_perm(vdst1, (vector
unsigned char) vB, vblue1);
426 vdst1 = vec_perm(vdst1, (vector
unsigned char) vG, vgreen1);
427 vdst1 = vec_perm(vdst1, valpha, vpermute);
428 vec_st(vdst1, 0,
dst);
431 (vector
unsigned char) vec_perm((vector
unsigned char) vR,
433 vdst2 = vec_perm(vdst2, (vector
unsigned char) vB, vblue2);
434 vdst2 = vec_perm(vdst2, (vector
unsigned char) vG, vgreen2);
435 vdst2 = vec_perm(vdst2, valpha, vpermute);
436 vec_st(vdst2, 16,
dst);
448 ONE_PIXEL_BLEND((extrawidth), extrawidth);
449 #undef ONE_PIXEL_BLEND
469 vector
unsigned char valpha;
470 vector
unsigned char vpermute;
471 vector
unsigned short vf800;
472 vector
unsigned int v8 = vec_splat_u32(8);
473 vector
unsigned int v16 = vec_add(v8, v8);
474 vector
unsigned short v1 = vec_splat_u16(1);
475 vector
unsigned short v3 = vec_splat_u16(3);
481 vector
unsigned char vredalpha1 = VECUINT8_LITERAL(0x10, 0x00, 0x01, 0x01,
482 0x10, 0x02, 0x01, 0x01,
483 0x10, 0x04, 0x01, 0x01,
486 vector
unsigned char vredalpha2 =
488 char) (vec_add((vector
unsigned int) vredalpha1, vec_sl(v8, v16))
494 vector
unsigned char vblue1 = VECUINT8_LITERAL(0x00, 0x01, 0x02, 0x11,
495 0x04, 0x05, 0x06, 0x13,
496 0x08, 0x09, 0x0a, 0x15,
497 0x0c, 0x0d, 0x0e, 0x17);
498 vector
unsigned char vblue2 =
499 (vector
unsigned char) (vec_add((vector
unsigned int) vblue1, v8)
505 vector
unsigned char vgreen1 = VECUINT8_LITERAL(0x00, 0x01, 0x10, 0x03,
506 0x04, 0x05, 0x12, 0x07,
507 0x08, 0x09, 0x14, 0x0b,
508 0x0c, 0x0d, 0x16, 0x0f);
509 vector
unsigned char vgreen2 =
511 char) (vec_add((vector
unsigned int) vgreen1, vec_sl(v8, v8))
517 vf800 = (vector
unsigned short) vec_splat_u8(-7);
518 vf800 = vec_sl(vf800, vec_splat_u16(8));
520 if (dstfmt->
Amask && info->
a) {
521 ((
unsigned char *) &valpha)[0] =
alpha = info->
a;
522 valpha = vec_splat(valpha, 0);
525 valpha = vec_splat_u8(0);
528 vpermute = calc_swizzle32(
NULL, dstfmt);
530 vector
unsigned char valigner;
531 vector
unsigned char voverflow;
532 vector
unsigned char vsrc;
538 #define ONE_PIXEL_BLEND(condition, widthvar) \
539 while (condition) { \
540 unsigned sR, sG, sB; \
541 unsigned short Pixel = *((unsigned short *)src); \
542 sR = (Pixel >> 7) & 0xf8; \
543 sG = (Pixel >> 2) & 0xf8; \
544 sB = (Pixel << 3) & 0xf8; \
545 ASSEMBLE_RGBA(dst, 4, dstfmt, sR, sG, sB, alpha); \
553 extrawidth = (
width % 8);
555 vsrc = vec_ld(0,
src);
556 valigner = VEC_ALIGNER(
src);
559 vector
unsigned short vR, vG, vB;
560 vector
unsigned char vdst1, vdst2;
562 voverflow = vec_ld(15,
src);
563 vsrc = vec_perm(vsrc, voverflow, valigner);
565 vR = vec_and(vec_sl((vector
unsigned short) vsrc,
v1), vf800);
566 vB = vec_sl((vector
unsigned short) vsrc,
v3);
570 (vector
unsigned char) vec_perm((vector
unsigned char) vR,
572 vdst1 = vec_perm(vdst1, (vector
unsigned char) vB, vblue1);
573 vdst1 = vec_perm(vdst1, (vector
unsigned char) vG, vgreen1);
574 vdst1 = vec_perm(vdst1, valpha, vpermute);
575 vec_st(vdst1, 0,
dst);
578 (vector
unsigned char) vec_perm((vector
unsigned char) vR,
580 vdst2 = vec_perm(vdst2, (vector
unsigned char) vB, vblue2);
581 vdst2 = vec_perm(vdst2, (vector
unsigned char) vG, vgreen2);
582 vdst2 = vec_perm(vdst2, valpha, vpermute);
583 vec_st(vdst2, 16,
dst);
595 ONE_PIXEL_BLEND((extrawidth), extrawidth);
596 #undef ONE_PIXEL_BLEND
618 int copy_alpha = (srcfmt->
Amask && dstfmt->
Amask);
622 vector
unsigned int valpha;
623 vector
unsigned char vpermute;
624 vector
unsigned char vzero;
625 vector
unsigned int vckey;
626 vector
unsigned int vrgbmask;
627 vpermute = calc_swizzle32(srcfmt, dstfmt);
628 if (info->
dst_w < 16) {
636 vzero = vec_splat_u8(0);
638 ((
unsigned char *) &valpha)[0] = (
unsigned char)
alpha;
640 (vector
unsigned int) vec_splat((vector
unsigned char) valpha, 0);
642 valpha = (vector
unsigned int) vzero;
645 ((
unsigned int *) (
char *) &vckey)[0] = ckey;
646 vckey = vec_splat(vckey, 0);
647 ((
unsigned int *) (
char *) &vrgbmask)[0] = rgbmask;
648 vrgbmask = vec_splat(vrgbmask, 0);
651 #define ONE_PIXEL_BLEND(condition, widthvar) \
653 while (condition) { \
655 unsigned sR, sG, sB, sA; \
656 DISEMBLE_RGBA((Uint8 *)srcp, srcbpp, srcfmt, Pixel, \
658 if ( (Pixel & rgbmask) != ckey ) { \
659 ASSEMBLE_RGBA((Uint8 *)dstp, dstbpp, dstfmt, \
662 dstp = (Uint32 *) (((Uint8 *) dstp) + dstbpp); \
663 srcp = (Uint32 *) (((Uint8 *) srcp) + srcbpp); \
667 while (condition) { \
669 unsigned sR, sG, sB; \
670 RETRIEVE_RGB_PIXEL((Uint8 *)srcp, srcbpp, Pixel); \
671 if ( Pixel != ckey ) { \
672 RGB_FROM_PIXEL(Pixel, srcfmt, sR, sG, sB); \
673 ASSEMBLE_RGBA((Uint8 *)dstp, dstbpp, dstfmt, \
674 sR, sG, sB, alpha); \
676 dstp = (Uint32 *) (((Uint8 *)dstp) + dstbpp); \
677 srcp = (Uint32 *) (((Uint8 *)srcp) + srcbpp); \
682 ONE_PIXEL_BLEND((UNALIGNED_PTR(dstp)) && (
width),
width);
685 int extrawidth = (
width % 4);
686 vector
unsigned char valigner = VEC_ALIGNER(srcp);
687 vector
unsigned int vs = vec_ld(0, srcp);
691 vector
unsigned char vsel;
692 vector
unsigned int vd;
693 vector
unsigned int voverflow = vec_ld(15, srcp);
695 vs = vec_perm(vs, voverflow, valigner);
697 vsel = (vector
unsigned char) vec_and(vs, vrgbmask);
698 vsel = (vector
unsigned char) vec_cmpeq(vs, vckey);
699 #if defined(__powerpc__) && (SDL_BYTEORDER == SDL_LIL_ENDIAN)
701 vpermute = reorder_ppc64le_vec(vpermute);
704 vs = vec_perm(vs, valpha, vpermute);
706 vd = vec_ld(0, dstp);
708 vd = (vector
unsigned int) vec_sel((vector
unsigned char) vs,
709 (vector
unsigned char) vd,
718 ONE_PIXEL_BLEND((extrawidth), extrawidth);
719 #undef ONE_PIXEL_BLEND
738 vector
unsigned int vzero = vec_splat_u32(0);
739 vector
unsigned char vpermute = calc_swizzle32(srcfmt, dstfmt);
742 vector
unsigned char valpha;
743 ((
unsigned char *) &valpha)[0] = info->
a;
744 vzero = (vector
unsigned int) vec_splat(valpha, 0);
752 vector
unsigned char valigner;
753 vector
unsigned int vbits;
754 vector
unsigned int voverflow;
762 while ((UNALIGNED_PTR(
dst)) && (
width)) {
767 *(
dst++) = MAKE8888(dstfmt,
r,
g,
b,
a);
772 extrawidth = (
width % 4);
774 valigner = VEC_ALIGNER(
src);
775 vbits = vec_ld(0,
src);
778 voverflow = vec_ld(15,
src);
781 vbits = vec_perm(vbits, voverflow, valigner);
782 #if defined(__powerpc__) && (SDL_BYTEORDER == SDL_LIL_ENDIAN)
784 vpermute = reorder_ppc64le_vec(vpermute);
786 vbits = vec_perm(vbits, vzero, vpermute);
787 vec_st(vbits, 0,
dst);
800 *(
dst++) = MAKE8888(dstfmt,
r,
g,
b,
a);
815 const int scalar_dst_lead =
sizeof(
Uint32) * 4;
816 const int vector_dst_lead =
sizeof(
Uint32) * 16;
825 vector
unsigned int vzero = vec_splat_u32(0);
826 vector
unsigned char vpermute = calc_swizzle32(srcfmt, dstfmt);
829 vector
unsigned char valpha;
830 ((
unsigned char *) &valpha)[0] = info->
a;
831 vzero = (vector
unsigned int) vec_splat(valpha, 0);
839 vector
unsigned char valigner;
840 vector
unsigned int vbits;
841 vector
unsigned int voverflow;
849 while ((UNALIGNED_PTR(
dst)) && (
width)) {
850 vec_dstt(
src + scalar_dst_lead, DST_CTRL(2, 32, 1024),
852 vec_dstst(
dst + scalar_dst_lead, DST_CTRL(2, 32, 1024),
858 *(
dst++) = MAKE8888(dstfmt,
r,
g,
b,
a);
863 extrawidth = (
width % 4);
865 valigner = VEC_ALIGNER(
src);
866 vbits = vec_ld(0,
src);
869 vec_dstt(
src + vector_dst_lead, DST_CTRL(2, 32, 1024),
871 vec_dstst(
dst + vector_dst_lead, DST_CTRL(2, 32, 1024),
873 voverflow = vec_ld(15,
src);
876 vbits = vec_perm(vbits, voverflow, valigner);
877 #if defined(__powerpc__) && (SDL_BYTEORDER == SDL_LIL_ENDIAN)
879 vpermute = reorder_ppc64le_vec(vpermute);
881 vbits = vec_perm(vbits, vzero, vpermute);
882 vec_st(vbits, 0,
dst);
895 *(
dst++) = MAKE8888(dstfmt,
r,
g,
b,
a);
903 vec_dss(DST_CHAN_SRC);
904 vec_dss(DST_CHAN_DEST);
913 char *
override =
SDL_getenv(
"SDL_ALTIVEC_BLIT_FEATURES");
915 unsigned int features_as_uint = 0;
916 SDL_sscanf(
override,
"%u", &features_as_uint);
934 #pragma altivec_model off
/* Runtime CPU-feature probe used to pick blitters; presumably the
 * non-AltiVec fallback (only MMX and ARM SIMD bits) — confirm this
 * branch is compiled when SDL_ALTIVEC_BLITTERS is off. */
938 #define GetBlitFeatures() ((SDL_HasMMX() ? BLIT_FEATURE_HAS_MMX : 0) | (SDL_HasARMSIMD() ? BLIT_FEATURE_HAS_ARM_SIMD : 0))
941 #if SDL_ARM_SIMD_BLITTERS
954 Blit_BGR888_RGB888ARMSIMDAsm(
width,
height, dstp, dststride, srcp, srcstride);
969 Blit_RGB444_RGB888ARMSIMDAsm(
width,
height, dstp, dststride, srcp, srcstride);
974 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
983 #define RGB888_RGB332(dst, src) { \
984 dst = (Uint8)((((src)&0x00E00000)>>16)| \
985 (((src)&0x0000E000)>>11)| \
986 (((src)&0x000000C0)>>6)); \
991 #ifndef USE_DUFFS_LOOP
998 int srcskip, dstskip;
1011 #ifdef USE_DUFFS_LOOP
1028 switch (
width & 3) {
1047 #ifdef USE_DUFFS_LOOP
1071 switch (
width & 3) {
1093 #define RGB101010_RGB332(dst, src) { \
1094 dst = (Uint8)((((src)&0x38000000)>>22)| \
1095 (((src)&0x000E0000)>>15)| \
1096 (((src)&0x00000300)>>8)); \
1101 #ifndef USE_DUFFS_LOOP
1108 int srcskip, dstskip;
1121 #ifdef USE_DUFFS_LOOP
1138 switch (
width & 3) {
1157 #ifdef USE_DUFFS_LOOP
1181 switch (
width & 3) {
1203 #define RGB888_RGB555(dst, src) { \
1204 *(Uint16 *)(dst) = (Uint16)((((*src)&0x00F80000)>>9)| \
1205 (((*src)&0x0000F800)>>6)| \
1206 (((*src)&0x000000F8)>>3)); \
1208 #ifndef USE_DUFFS_LOOP
1209 #define RGB888_RGB555_TWO(dst, src) { \
1210 *(Uint32 *)(dst) = (((((src[HI])&0x00F80000)>>9)| \
1211 (((src[HI])&0x0000F800)>>6)| \
1212 (((src[HI])&0x000000F8)>>3))<<16)| \
1213 (((src[LO])&0x00F80000)>>9)| \
1214 (((src[LO])&0x0000F800)>>6)| \
1215 (((src[LO])&0x000000F8)>>3); \
1221 #ifndef USE_DUFFS_LOOP
1227 int srcskip, dstskip;
1237 #ifdef USE_DUFFS_LOOP
1251 if ((
long)
dst & 0x03) {
1266 RGB888_RGB555_TWO(
dst,
src);
1269 RGB888_RGB555_TWO(
dst,
src);
1274 switch (
width & 3) {
1280 RGB888_RGB555_TWO(
dst,
src);
1297 RGB888_RGB555_TWO(
dst,
src);
1300 RGB888_RGB555_TWO(
dst,
src);
1305 switch (
width & 3) {
1311 RGB888_RGB555_TWO(
dst,
src);
1329 #define RGB888_RGB565(dst, src) { \
1330 *(Uint16 *)(dst) = (Uint16)((((*src)&0x00F80000)>>8)| \
1331 (((*src)&0x0000FC00)>>5)| \
1332 (((*src)&0x000000F8)>>3)); \
1334 #ifndef USE_DUFFS_LOOP
1335 #define RGB888_RGB565_TWO(dst, src) { \
1336 *(Uint32 *)(dst) = (((((src[HI])&0x00F80000)>>8)| \
1337 (((src[HI])&0x0000FC00)>>5)| \
1338 (((src[HI])&0x000000F8)>>3))<<16)| \
1339 (((src[LO])&0x00F80000)>>8)| \
1340 (((src[LO])&0x0000FC00)>>5)| \
1341 (((src[LO])&0x000000F8)>>3); \
1347 #ifndef USE_DUFFS_LOOP
1353 int srcskip, dstskip;
1363 #ifdef USE_DUFFS_LOOP
1377 if ((
long)
dst & 0x03) {
1392 RGB888_RGB565_TWO(
dst,
src);
1395 RGB888_RGB565_TWO(
dst,
src);
1400 switch (
width & 3) {
1406 RGB888_RGB565_TWO(
dst,
src);
1423 RGB888_RGB565_TWO(
dst,
src);
1426 RGB888_RGB565_TWO(
dst,
src);
1431 switch (
width & 3) {
1437 RGB888_RGB565_TWO(
dst,
src);
1455 #if SDL_HAVE_BLIT_N_RGB565
/* Expand one RGB565 pixel (read byte-wise through src[LO]/src[HI]) to a
 * 32-bit pixel by summing two interleaved lookup-table entries: map[]
 * stores the low-byte contribution at even indices and the high-byte
 * contribution at odd indices (see the 2-words-per-index tables below).
 * NOTE(review): `dst` is not used in the expansion — presumably the
 * caller assigns the result; LO/HI are byte-order indices defined
 * elsewhere — confirm against the SDL_BYTEORDER block above. */
1458 #define RGB565_32(dst, src, map) (map[src[LO]*2] + map[src[HI]*2+1])
1462 #ifndef USE_DUFFS_LOOP
1468 int srcskip, dstskip;
1505 switch (
width & 3) {
1525 0x00000000, 0xff000000, 0x00000008, 0xff002000,
1526 0x00000010, 0xff004000, 0x00000018, 0xff006100,
1527 0x00000020, 0xff008100, 0x00000029, 0xff00a100,
1528 0x00000031, 0xff00c200, 0x00000039, 0xff00e200,
1529 0x00000041, 0xff080000, 0x0000004a, 0xff082000,
1530 0x00000052, 0xff084000, 0x0000005a, 0xff086100,
1531 0x00000062, 0xff088100, 0x0000006a, 0xff08a100,
1532 0x00000073, 0xff08c200, 0x0000007b, 0xff08e200,
1533 0x00000083, 0xff100000, 0x0000008b, 0xff102000,
1534 0x00000094, 0xff104000, 0x0000009c, 0xff106100,
1535 0x000000a4, 0xff108100, 0x000000ac, 0xff10a100,
1536 0x000000b4, 0xff10c200, 0x000000bd, 0xff10e200,
1537 0x000000c5, 0xff180000, 0x000000cd, 0xff182000,
1538 0x000000d5, 0xff184000, 0x000000de, 0xff186100,
1539 0x000000e6, 0xff188100, 0x000000ee, 0xff18a100,
1540 0x000000f6, 0xff18c200, 0x000000ff, 0xff18e200,
1541 0x00000400, 0xff200000, 0x00000408, 0xff202000,
1542 0x00000410, 0xff204000, 0x00000418, 0xff206100,
1543 0x00000420, 0xff208100, 0x00000429, 0xff20a100,
1544 0x00000431, 0xff20c200, 0x00000439, 0xff20e200,
1545 0x00000441, 0xff290000, 0x0000044a, 0xff292000,
1546 0x00000452, 0xff294000, 0x0000045a, 0xff296100,
1547 0x00000462, 0xff298100, 0x0000046a, 0xff29a100,
1548 0x00000473, 0xff29c200, 0x0000047b, 0xff29e200,
1549 0x00000483, 0xff310000, 0x0000048b, 0xff312000,
1550 0x00000494, 0xff314000, 0x0000049c, 0xff316100,
1551 0x000004a4, 0xff318100, 0x000004ac, 0xff31a100,
1552 0x000004b4, 0xff31c200, 0x000004bd, 0xff31e200,
1553 0x000004c5, 0xff390000, 0x000004cd, 0xff392000,
1554 0x000004d5, 0xff394000, 0x000004de, 0xff396100,
1555 0x000004e6, 0xff398100, 0x000004ee, 0xff39a100,
1556 0x000004f6, 0xff39c200, 0x000004ff, 0xff39e200,
1557 0x00000800, 0xff410000, 0x00000808, 0xff412000,
1558 0x00000810, 0xff414000, 0x00000818, 0xff416100,
1559 0x00000820, 0xff418100, 0x00000829, 0xff41a100,
1560 0x00000831, 0xff41c200, 0x00000839, 0xff41e200,
1561 0x00000841, 0xff4a0000, 0x0000084a, 0xff4a2000,
1562 0x00000852, 0xff4a4000, 0x0000085a, 0xff4a6100,
1563 0x00000862, 0xff4a8100, 0x0000086a, 0xff4aa100,
1564 0x00000873, 0xff4ac200, 0x0000087b, 0xff4ae200,
1565 0x00000883, 0xff520000, 0x0000088b, 0xff522000,
1566 0x00000894, 0xff524000, 0x0000089c, 0xff526100,
1567 0x000008a4, 0xff528100, 0x000008ac, 0xff52a100,
1568 0x000008b4, 0xff52c200, 0x000008bd, 0xff52e200,
1569 0x000008c5, 0xff5a0000, 0x000008cd, 0xff5a2000,
1570 0x000008d5, 0xff5a4000, 0x000008de, 0xff5a6100,
1571 0x000008e6, 0xff5a8100, 0x000008ee, 0xff5aa100,
1572 0x000008f6, 0xff5ac200, 0x000008ff, 0xff5ae200,
1573 0x00000c00, 0xff620000, 0x00000c08, 0xff622000,
1574 0x00000c10, 0xff624000, 0x00000c18, 0xff626100,
1575 0x00000c20, 0xff628100, 0x00000c29, 0xff62a100,
1576 0x00000c31, 0xff62c200, 0x00000c39, 0xff62e200,
1577 0x00000c41, 0xff6a0000, 0x00000c4a, 0xff6a2000,
1578 0x00000c52, 0xff6a4000, 0x00000c5a, 0xff6a6100,
1579 0x00000c62, 0xff6a8100, 0x00000c6a, 0xff6aa100,
1580 0x00000c73, 0xff6ac200, 0x00000c7b, 0xff6ae200,
1581 0x00000c83, 0xff730000, 0x00000c8b, 0xff732000,
1582 0x00000c94, 0xff734000, 0x00000c9c, 0xff736100,
1583 0x00000ca4, 0xff738100, 0x00000cac, 0xff73a100,
1584 0x00000cb4, 0xff73c200, 0x00000cbd, 0xff73e200,
1585 0x00000cc5, 0xff7b0000, 0x00000ccd, 0xff7b2000,
1586 0x00000cd5, 0xff7b4000, 0x00000cde, 0xff7b6100,
1587 0x00000ce6, 0xff7b8100, 0x00000cee, 0xff7ba100,
1588 0x00000cf6, 0xff7bc200, 0x00000cff, 0xff7be200,
1589 0x00001000, 0xff830000, 0x00001008, 0xff832000,
1590 0x00001010, 0xff834000, 0x00001018, 0xff836100,
1591 0x00001020, 0xff838100, 0x00001029, 0xff83a100,
1592 0x00001031, 0xff83c200, 0x00001039, 0xff83e200,
1593 0x00001041, 0xff8b0000, 0x0000104a, 0xff8b2000,
1594 0x00001052, 0xff8b4000, 0x0000105a, 0xff8b6100,
1595 0x00001062, 0xff8b8100, 0x0000106a, 0xff8ba100,
1596 0x00001073, 0xff8bc200, 0x0000107b, 0xff8be200,
1597 0x00001083, 0xff940000, 0x0000108b, 0xff942000,
1598 0x00001094, 0xff944000, 0x0000109c, 0xff946100,
1599 0x000010a4, 0xff948100, 0x000010ac, 0xff94a100,
1600 0x000010b4, 0xff94c200, 0x000010bd, 0xff94e200,
1601 0x000010c5, 0xff9c0000, 0x000010cd, 0xff9c2000,
1602 0x000010d5, 0xff9c4000, 0x000010de, 0xff9c6100,
1603 0x000010e6, 0xff9c8100, 0x000010ee, 0xff9ca100,
1604 0x000010f6, 0xff9cc200, 0x000010ff, 0xff9ce200,
1605 0x00001400, 0xffa40000, 0x00001408, 0xffa42000,
1606 0x00001410, 0xffa44000, 0x00001418, 0xffa46100,
1607 0x00001420, 0xffa48100, 0x00001429, 0xffa4a100,
1608 0x00001431, 0xffa4c200, 0x00001439, 0xffa4e200,
1609 0x00001441, 0xffac0000, 0x0000144a, 0xffac2000,
1610 0x00001452, 0xffac4000, 0x0000145a, 0xffac6100,
1611 0x00001462, 0xffac8100, 0x0000146a, 0xffaca100,
1612 0x00001473, 0xffacc200, 0x0000147b, 0xfface200,
1613 0x00001483, 0xffb40000, 0x0000148b, 0xffb42000,
1614 0x00001494, 0xffb44000, 0x0000149c, 0xffb46100,
1615 0x000014a4, 0xffb48100, 0x000014ac, 0xffb4a100,
1616 0x000014b4, 0xffb4c200, 0x000014bd, 0xffb4e200,
1617 0x000014c5, 0xffbd0000, 0x000014cd, 0xffbd2000,
1618 0x000014d5, 0xffbd4000, 0x000014de, 0xffbd6100,
1619 0x000014e6, 0xffbd8100, 0x000014ee, 0xffbda100,
1620 0x000014f6, 0xffbdc200, 0x000014ff, 0xffbde200,
1621 0x00001800, 0xffc50000, 0x00001808, 0xffc52000,
1622 0x00001810, 0xffc54000, 0x00001818, 0xffc56100,
1623 0x00001820, 0xffc58100, 0x00001829, 0xffc5a100,
1624 0x00001831, 0xffc5c200, 0x00001839, 0xffc5e200,
1625 0x00001841, 0xffcd0000, 0x0000184a, 0xffcd2000,
1626 0x00001852, 0xffcd4000, 0x0000185a, 0xffcd6100,
1627 0x00001862, 0xffcd8100, 0x0000186a, 0xffcda100,
1628 0x00001873, 0xffcdc200, 0x0000187b, 0xffcde200,
1629 0x00001883, 0xffd50000, 0x0000188b, 0xffd52000,
1630 0x00001894, 0xffd54000, 0x0000189c, 0xffd56100,
1631 0x000018a4, 0xffd58100, 0x000018ac, 0xffd5a100,
1632 0x000018b4, 0xffd5c200, 0x000018bd, 0xffd5e200,
1633 0x000018c5, 0xffde0000, 0x000018cd, 0xffde2000,
1634 0x000018d5, 0xffde4000, 0x000018de, 0xffde6100,
1635 0x000018e6, 0xffde8100, 0x000018ee, 0xffdea100,
1636 0x000018f6, 0xffdec200, 0x000018ff, 0xffdee200,
1637 0x00001c00, 0xffe60000, 0x00001c08, 0xffe62000,
1638 0x00001c10, 0xffe64000, 0x00001c18, 0xffe66100,
1639 0x00001c20, 0xffe68100, 0x00001c29, 0xffe6a100,
1640 0x00001c31, 0xffe6c200, 0x00001c39, 0xffe6e200,
1641 0x00001c41, 0xffee0000, 0x00001c4a, 0xffee2000,
1642 0x00001c52, 0xffee4000, 0x00001c5a, 0xffee6100,
1643 0x00001c62, 0xffee8100, 0x00001c6a, 0xffeea100,
1644 0x00001c73, 0xffeec200, 0x00001c7b, 0xffeee200,
1645 0x00001c83, 0xfff60000, 0x00001c8b, 0xfff62000,
1646 0x00001c94, 0xfff64000, 0x00001c9c, 0xfff66100,
1647 0x00001ca4, 0xfff68100, 0x00001cac, 0xfff6a100,
1648 0x00001cb4, 0xfff6c200, 0x00001cbd, 0xfff6e200,
1649 0x00001cc5, 0xffff0000, 0x00001ccd, 0xffff2000,
1650 0x00001cd5, 0xffff4000, 0x00001cde, 0xffff6100,
1651 0x00001ce6, 0xffff8100, 0x00001cee, 0xffffa100,
1652 0x00001cf6, 0xffffc200, 0x00001cff, 0xffffe200
1663 0xff000000, 0x00000000, 0xff080000, 0x00002000,
1664 0xff100000, 0x00004000, 0xff180000, 0x00006100,
1665 0xff200000, 0x00008100, 0xff290000, 0x0000a100,
1666 0xff310000, 0x0000c200, 0xff390000, 0x0000e200,
1667 0xff410000, 0x00000008, 0xff4a0000, 0x00002008,
1668 0xff520000, 0x00004008, 0xff5a0000, 0x00006108,
1669 0xff620000, 0x00008108, 0xff6a0000, 0x0000a108,
1670 0xff730000, 0x0000c208, 0xff7b0000, 0x0000e208,
1671 0xff830000, 0x00000010, 0xff8b0000, 0x00002010,
1672 0xff940000, 0x00004010, 0xff9c0000, 0x00006110,
1673 0xffa40000, 0x00008110, 0xffac0000, 0x0000a110,
1674 0xffb40000, 0x0000c210, 0xffbd0000, 0x0000e210,
1675 0xffc50000, 0x00000018, 0xffcd0000, 0x00002018,
1676 0xffd50000, 0x00004018, 0xffde0000, 0x00006118,
1677 0xffe60000, 0x00008118, 0xffee0000, 0x0000a118,
1678 0xfff60000, 0x0000c218, 0xffff0000, 0x0000e218,
1679 0xff000400, 0x00000020, 0xff080400, 0x00002020,
1680 0xff100400, 0x00004020, 0xff180400, 0x00006120,
1681 0xff200400, 0x00008120, 0xff290400, 0x0000a120,
1682 0xff310400, 0x0000c220, 0xff390400, 0x0000e220,
1683 0xff410400, 0x00000029, 0xff4a0400, 0x00002029,
1684 0xff520400, 0x00004029, 0xff5a0400, 0x00006129,
1685 0xff620400, 0x00008129, 0xff6a0400, 0x0000a129,
1686 0xff730400, 0x0000c229, 0xff7b0400, 0x0000e229,
1687 0xff830400, 0x00000031, 0xff8b0400, 0x00002031,
1688 0xff940400, 0x00004031, 0xff9c0400, 0x00006131,
1689 0xffa40400, 0x00008131, 0xffac0400, 0x0000a131,
1690 0xffb40400, 0x0000c231, 0xffbd0400, 0x0000e231,
1691 0xffc50400, 0x00000039, 0xffcd0400, 0x00002039,
1692 0xffd50400, 0x00004039, 0xffde0400, 0x00006139,
1693 0xffe60400, 0x00008139, 0xffee0400, 0x0000a139,
1694 0xfff60400, 0x0000c239, 0xffff0400, 0x0000e239,
1695 0xff000800, 0x00000041, 0xff080800, 0x00002041,
1696 0xff100800, 0x00004041, 0xff180800, 0x00006141,
1697 0xff200800, 0x00008141, 0xff290800, 0x0000a141,
1698 0xff310800, 0x0000c241, 0xff390800, 0x0000e241,
1699 0xff410800, 0x0000004a, 0xff4a0800, 0x0000204a,
1700 0xff520800, 0x0000404a, 0xff5a0800, 0x0000614a,
1701 0xff620800, 0x0000814a, 0xff6a0800, 0x0000a14a,
1702 0xff730800, 0x0000c24a, 0xff7b0800, 0x0000e24a,
1703 0xff830800, 0x00000052, 0xff8b0800, 0x00002052,
1704 0xff940800, 0x00004052, 0xff9c0800, 0x00006152,
1705 0xffa40800, 0x00008152, 0xffac0800, 0x0000a152,
1706 0xffb40800, 0x0000c252, 0xffbd0800, 0x0000e252,
1707 0xffc50800, 0x0000005a, 0xffcd0800, 0x0000205a,
1708 0xffd50800, 0x0000405a, 0xffde0800, 0x0000615a,
1709 0xffe60800, 0x0000815a, 0xffee0800, 0x0000a15a,
1710 0xfff60800, 0x0000c25a, 0xffff0800, 0x0000e25a,
1711 0xff000c00, 0x00000062, 0xff080c00, 0x00002062,
1712 0xff100c00, 0x00004062, 0xff180c00, 0x00006162,
1713 0xff200c00, 0x00008162, 0xff290c00, 0x0000a162,
1714 0xff310c00, 0x0000c262, 0xff390c00, 0x0000e262,
1715 0xff410c00, 0x0000006a, 0xff4a0c00, 0x0000206a,
1716 0xff520c00, 0x0000406a, 0xff5a0c00, 0x0000616a,
1717 0xff620c00, 0x0000816a, 0xff6a0c00, 0x0000a16a,
1718 0xff730c00, 0x0000c26a, 0xff7b0c00, 0x0000e26a,
1719 0xff830c00, 0x00000073, 0xff8b0c00, 0x00002073,
1720 0xff940c00, 0x00004073, 0xff9c0c00, 0x00006173,
1721 0xffa40c00, 0x00008173, 0xffac0c00, 0x0000a173,
1722 0xffb40c00, 0x0000c273, 0xffbd0c00, 0x0000e273,
1723 0xffc50c00, 0x0000007b, 0xffcd0c00, 0x0000207b,
1724 0xffd50c00, 0x0000407b, 0xffde0c00, 0x0000617b,
1725 0xffe60c00, 0x0000817b, 0xffee0c00, 0x0000a17b,
1726 0xfff60c00, 0x0000c27b, 0xffff0c00, 0x0000e27b,
1727 0xff001000, 0x00000083, 0xff081000, 0x00002083,
1728 0xff101000, 0x00004083, 0xff181000, 0x00006183,
1729 0xff201000, 0x00008183, 0xff291000, 0x0000a183,
1730 0xff311000, 0x0000c283, 0xff391000, 0x0000e283,
1731 0xff411000, 0x0000008b, 0xff4a1000, 0x0000208b,
1732 0xff521000, 0x0000408b, 0xff5a1000, 0x0000618b,
1733 0xff621000, 0x0000818b, 0xff6a1000, 0x0000a18b,
1734 0xff731000, 0x0000c28b, 0xff7b1000, 0x0000e28b,
1735 0xff831000, 0x00000094, 0xff8b1000, 0x00002094,
1736 0xff941000, 0x00004094, 0xff9c1000, 0x00006194,
1737 0xffa41000, 0x00008194, 0xffac1000, 0x0000a194,
1738 0xffb41000, 0x0000c294, 0xffbd1000, 0x0000e294,
1739 0xffc51000, 0x0000009c, 0xffcd1000, 0x0000209c,
1740 0xffd51000, 0x0000409c, 0xffde1000, 0x0000619c,
1741 0xffe61000, 0x0000819c, 0xffee1000, 0x0000a19c,
1742 0xfff61000, 0x0000c29c, 0xffff1000, 0x0000e29c,
1743 0xff001400, 0x000000a4, 0xff081400, 0x000020a4,
1744 0xff101400, 0x000040a4, 0xff181400, 0x000061a4,
1745 0xff201400, 0x000081a4, 0xff291400, 0x0000a1a4,
1746 0xff311400, 0x0000c2a4, 0xff391400, 0x0000e2a4,
1747 0xff411400, 0x000000ac, 0xff4a1400, 0x000020ac,
1748 0xff521400, 0x000040ac, 0xff5a1400, 0x000061ac,
1749 0xff621400, 0x000081ac, 0xff6a1400, 0x0000a1ac,
1750 0xff731400, 0x0000c2ac, 0xff7b1400, 0x0000e2ac,
1751 0xff831400, 0x000000b4, 0xff8b1400, 0x000020b4,
1752 0xff941400, 0x000040b4, 0xff9c1400, 0x000061b4,
1753 0xffa41400, 0x000081b4, 0xffac1400, 0x0000a1b4,
1754 0xffb41400, 0x0000c2b4, 0xffbd1400, 0x0000e2b4,
1755 0xffc51400, 0x000000bd, 0xffcd1400, 0x000020bd,
1756 0xffd51400, 0x000040bd, 0xffde1400, 0x000061bd,
1757 0xffe61400, 0x000081bd, 0xffee1400, 0x0000a1bd,
1758 0xfff61400, 0x0000c2bd, 0xffff1400, 0x0000e2bd,
1759 0xff001800, 0x000000c5, 0xff081800, 0x000020c5,
1760 0xff101800, 0x000040c5, 0xff181800, 0x000061c5,
1761 0xff201800, 0x000081c5, 0xff291800, 0x0000a1c5,
1762 0xff311800, 0x0000c2c5, 0xff391800, 0x0000e2c5,
1763 0xff411800, 0x000000cd, 0xff4a1800, 0x000020cd,
1764 0xff521800, 0x000040cd, 0xff5a1800, 0x000061cd,
1765 0xff621800, 0x000081cd, 0xff6a1800, 0x0000a1cd,
1766 0xff731800, 0x0000c2cd, 0xff7b1800, 0x0000e2cd,
1767 0xff831800, 0x000000d5, 0xff8b1800, 0x000020d5,
1768 0xff941800, 0x000040d5, 0xff9c1800, 0x000061d5,
1769 0xffa41800, 0x000081d5, 0xffac1800, 0x0000a1d5,
1770 0xffb41800, 0x0000c2d5, 0xffbd1800, 0x0000e2d5,
1771 0xffc51800, 0x000000de, 0xffcd1800, 0x000020de,
1772 0xffd51800, 0x000040de, 0xffde1800, 0x000061de,
1773 0xffe61800, 0x000081de, 0xffee1800, 0x0000a1de,
1774 0xfff61800, 0x0000c2de, 0xffff1800, 0x0000e2de,
1775 0xff001c00, 0x000000e6, 0xff081c00, 0x000020e6,
1776 0xff101c00, 0x000040e6, 0xff181c00, 0x000061e6,
1777 0xff201c00, 0x000081e6, 0xff291c00, 0x0000a1e6,
1778 0xff311c00, 0x0000c2e6, 0xff391c00, 0x0000e2e6,
1779 0xff411c00, 0x000000ee, 0xff4a1c00, 0x000020ee,
1780 0xff521c00, 0x000040ee, 0xff5a1c00, 0x000061ee,
1781 0xff621c00, 0x000081ee, 0xff6a1c00, 0x0000a1ee,
1782 0xff731c00, 0x0000c2ee, 0xff7b1c00, 0x0000e2ee,
1783 0xff831c00, 0x000000f6, 0xff8b1c00, 0x000020f6,
1784 0xff941c00, 0x000040f6, 0xff9c1c00, 0x000061f6,
1785 0xffa41c00, 0x000081f6, 0xffac1c00, 0x0000a1f6,
1786 0xffb41c00, 0x0000c2f6, 0xffbd1c00, 0x0000e2f6,
1787 0xffc51c00, 0x000000ff, 0xffcd1c00, 0x000020ff,
1788 0xffd51c00, 0x000040ff, 0xffde1c00, 0x000061ff,
1789 0xffe61c00, 0x000081ff, 0xffee1c00, 0x0000a1ff,
1790 0xfff61c00, 0x0000c2ff, 0xffff1c00, 0x0000e2ff
1801 0x000000ff, 0x00000000, 0x000008ff, 0x00200000,
1802 0x000010ff, 0x00400000, 0x000018ff, 0x00610000,
1803 0x000020ff, 0x00810000, 0x000029ff, 0x00a10000,
1804 0x000031ff, 0x00c20000, 0x000039ff, 0x00e20000,
1805 0x000041ff, 0x08000000, 0x00004aff, 0x08200000,
1806 0x000052ff, 0x08400000, 0x00005aff, 0x08610000,
1807 0x000062ff, 0x08810000, 0x00006aff, 0x08a10000,
1808 0x000073ff, 0x08c20000, 0x00007bff, 0x08e20000,
1809 0x000083ff, 0x10000000, 0x00008bff, 0x10200000,
1810 0x000094ff, 0x10400000, 0x00009cff, 0x10610000,
1811 0x0000a4ff, 0x10810000, 0x0000acff, 0x10a10000,
1812 0x0000b4ff, 0x10c20000, 0x0000bdff, 0x10e20000,
1813 0x0000c5ff, 0x18000000, 0x0000cdff, 0x18200000,
1814 0x0000d5ff, 0x18400000, 0x0000deff, 0x18610000,
1815 0x0000e6ff, 0x18810000, 0x0000eeff, 0x18a10000,
1816 0x0000f6ff, 0x18c20000, 0x0000ffff, 0x18e20000,
1817 0x000400ff, 0x20000000, 0x000408ff, 0x20200000,
1818 0x000410ff, 0x20400000, 0x000418ff, 0x20610000,
1819 0x000420ff, 0x20810000, 0x000429ff, 0x20a10000,
1820 0x000431ff, 0x20c20000, 0x000439ff, 0x20e20000,
1821 0x000441ff, 0x29000000, 0x00044aff, 0x29200000,
1822 0x000452ff, 0x29400000, 0x00045aff, 0x29610000,
1823 0x000462ff, 0x29810000, 0x00046aff, 0x29a10000,
1824 0x000473ff, 0x29c20000, 0x00047bff, 0x29e20000,
1825 0x000483ff, 0x31000000, 0x00048bff, 0x31200000,
1826 0x000494ff, 0x31400000, 0x00049cff, 0x31610000,
1827 0x0004a4ff, 0x31810000, 0x0004acff, 0x31a10000,
1828 0x0004b4ff, 0x31c20000, 0x0004bdff, 0x31e20000,
1829 0x0004c5ff, 0x39000000, 0x0004cdff, 0x39200000,
1830 0x0004d5ff, 0x39400000, 0x0004deff, 0x39610000,
1831 0x0004e6ff, 0x39810000, 0x0004eeff, 0x39a10000,
1832 0x0004f6ff, 0x39c20000, 0x0004ffff, 0x39e20000,
1833 0x000800ff, 0x41000000, 0x000808ff, 0x41200000,
1834 0x000810ff, 0x41400000, 0x000818ff, 0x41610000,
1835 0x000820ff, 0x41810000, 0x000829ff, 0x41a10000,
1836 0x000831ff, 0x41c20000, 0x000839ff, 0x41e20000,
1837 0x000841ff, 0x4a000000, 0x00084aff, 0x4a200000,
1838 0x000852ff, 0x4a400000, 0x00085aff, 0x4a610000,
1839 0x000862ff, 0x4a810000, 0x00086aff, 0x4aa10000,
1840 0x000873ff, 0x4ac20000, 0x00087bff, 0x4ae20000,
1841 0x000883ff, 0x52000000, 0x00088bff, 0x52200000,
1842 0x000894ff, 0x52400000, 0x00089cff, 0x52610000,
1843 0x0008a4ff, 0x52810000, 0x0008acff, 0x52a10000,
1844 0x0008b4ff, 0x52c20000, 0x0008bdff, 0x52e20000,
1845 0x0008c5ff, 0x5a000000, 0x0008cdff, 0x5a200000,
1846 0x0008d5ff, 0x5a400000, 0x0008deff, 0x5a610000,
1847 0x0008e6ff, 0x5a810000, 0x0008eeff, 0x5aa10000,
1848 0x0008f6ff, 0x5ac20000, 0x0008ffff, 0x5ae20000,
1849 0x000c00ff, 0x62000000, 0x000c08ff, 0x62200000,
1850 0x000c10ff, 0x62400000, 0x000c18ff, 0x62610000,
1851 0x000c20ff, 0x62810000, 0x000c29ff, 0x62a10000,
1852 0x000c31ff, 0x62c20000, 0x000c39ff, 0x62e20000,
1853 0x000c41ff, 0x6a000000, 0x000c4aff, 0x6a200000,
1854 0x000c52ff, 0x6a400000, 0x000c5aff, 0x6a610000,
1855 0x000c62ff, 0x6a810000, 0x000c6aff, 0x6aa10000,
1856 0x000c73ff, 0x6ac20000, 0x000c7bff, 0x6ae20000,
1857 0x000c83ff, 0x73000000, 0x000c8bff, 0x73200000,
1858 0x000c94ff, 0x73400000, 0x000c9cff, 0x73610000,
1859 0x000ca4ff, 0x73810000, 0x000cacff, 0x73a10000,
1860 0x000cb4ff, 0x73c20000, 0x000cbdff, 0x73e20000,
1861 0x000cc5ff, 0x7b000000, 0x000ccdff, 0x7b200000,
1862 0x000cd5ff, 0x7b400000, 0x000cdeff, 0x7b610000,
1863 0x000ce6ff, 0x7b810000, 0x000ceeff, 0x7ba10000,
1864 0x000cf6ff, 0x7bc20000, 0x000cffff, 0x7be20000,
1865 0x001000ff, 0x83000000, 0x001008ff, 0x83200000,
1866 0x001010ff, 0x83400000, 0x001018ff, 0x83610000,
1867 0x001020ff, 0x83810000, 0x001029ff, 0x83a10000,
1868 0x001031ff, 0x83c20000, 0x001039ff, 0x83e20000,
1869 0x001041ff, 0x8b000000, 0x00104aff, 0x8b200000,
1870 0x001052ff, 0x8b400000, 0x00105aff, 0x8b610000,
1871 0x001062ff, 0x8b810000, 0x00106aff, 0x8ba10000,
1872 0x001073ff, 0x8bc20000, 0x00107bff, 0x8be20000,
1873 0x001083ff, 0x94000000, 0x00108bff, 0x94200000,
1874 0x001094ff, 0x94400000, 0x00109cff, 0x94610000,
1875 0x0010a4ff, 0x94810000, 0x0010acff, 0x94a10000,
1876 0x0010b4ff, 0x94c20000, 0x0010bdff, 0x94e20000,
1877 0x0010c5ff, 0x9c000000, 0x0010cdff, 0x9c200000,
1878 0x0010d5ff, 0x9c400000, 0x0010deff, 0x9c610000,
1879 0x0010e6ff, 0x9c810000, 0x0010eeff, 0x9ca10000,
1880 0x0010f6ff, 0x9cc20000, 0x0010ffff, 0x9ce20000,
1881 0x001400ff, 0xa4000000, 0x001408ff, 0xa4200000,
1882 0x001410ff, 0xa4400000, 0x001418ff, 0xa4610000,
1883 0x001420ff, 0xa4810000, 0x001429ff, 0xa4a10000,
1884 0x001431ff, 0xa4c20000, 0x001439ff, 0xa4e20000,
1885 0x001441ff, 0xac000000, 0x00144aff, 0xac200000,
1886 0x001452ff, 0xac400000, 0x00145aff, 0xac610000,
1887 0x001462ff, 0xac810000, 0x00146aff, 0xaca10000,
1888 0x001473ff, 0xacc20000, 0x00147bff, 0xace20000,
1889 0x001483ff, 0xb4000000, 0x00148bff, 0xb4200000,
1890 0x001494ff, 0xb4400000, 0x00149cff, 0xb4610000,
1891 0x0014a4ff, 0xb4810000, 0x0014acff, 0xb4a10000,
1892 0x0014b4ff, 0xb4c20000, 0x0014bdff, 0xb4e20000,
1893 0x0014c5ff, 0xbd000000, 0x0014cdff, 0xbd200000,
1894 0x0014d5ff, 0xbd400000, 0x0014deff, 0xbd610000,
1895 0x0014e6ff, 0xbd810000, 0x0014eeff, 0xbda10000,
1896 0x0014f6ff, 0xbdc20000, 0x0014ffff, 0xbde20000,
1897 0x001800ff, 0xc5000000, 0x001808ff, 0xc5200000,
1898 0x001810ff, 0xc5400000, 0x001818ff, 0xc5610000,
1899 0x001820ff, 0xc5810000, 0x001829ff, 0xc5a10000,
1900 0x001831ff, 0xc5c20000, 0x001839ff, 0xc5e20000,
1901 0x001841ff, 0xcd000000, 0x00184aff, 0xcd200000,
1902 0x001852ff, 0xcd400000, 0x00185aff, 0xcd610000,
1903 0x001862ff, 0xcd810000, 0x00186aff, 0xcda10000,
1904 0x001873ff, 0xcdc20000, 0x00187bff, 0xcde20000,
1905 0x001883ff, 0xd5000000, 0x00188bff, 0xd5200000,
1906 0x001894ff, 0xd5400000, 0x00189cff, 0xd5610000,
1907 0x0018a4ff, 0xd5810000, 0x0018acff, 0xd5a10000,
1908 0x0018b4ff, 0xd5c20000, 0x0018bdff, 0xd5e20000,
1909 0x0018c5ff, 0xde000000, 0x0018cdff, 0xde200000,
1910 0x0018d5ff, 0xde400000, 0x0018deff, 0xde610000,
1911 0x0018e6ff, 0xde810000, 0x0018eeff, 0xdea10000,
1912 0x0018f6ff, 0xdec20000, 0x0018ffff, 0xdee20000,
1913 0x001c00ff, 0xe6000000, 0x001c08ff, 0xe6200000,
1914 0x001c10ff, 0xe6400000, 0x001c18ff, 0xe6610000,
1915 0x001c20ff, 0xe6810000, 0x001c29ff, 0xe6a10000,
1916 0x001c31ff, 0xe6c20000, 0x001c39ff, 0xe6e20000,
1917 0x001c41ff, 0xee000000, 0x001c4aff, 0xee200000,
1918 0x001c52ff, 0xee400000, 0x001c5aff, 0xee610000,
1919 0x001c62ff, 0xee810000, 0x001c6aff, 0xeea10000,
1920 0x001c73ff, 0xeec20000, 0x001c7bff, 0xeee20000,
1921 0x001c83ff, 0xf6000000, 0x001c8bff, 0xf6200000,
1922 0x001c94ff, 0xf6400000, 0x001c9cff, 0xf6610000,
1923 0x001ca4ff, 0xf6810000, 0x001cacff, 0xf6a10000,
1924 0x001cb4ff, 0xf6c20000, 0x001cbdff, 0xf6e20000,
1925 0x001cc5ff, 0xff000000, 0x001ccdff, 0xff200000,
1926 0x001cd5ff, 0xff400000, 0x001cdeff, 0xff610000,
1927 0x001ce6ff, 0xff810000, 0x001ceeff, 0xffa10000,
1928 0x001cf6ff, 0xffc20000, 0x001cffff, 0xffe20000,
1939 0x00000000, 0x000000ff, 0x08000000, 0x002000ff,
1940 0x10000000, 0x004000ff, 0x18000000, 0x006100ff,
1941 0x20000000, 0x008100ff, 0x29000000, 0x00a100ff,
1942 0x31000000, 0x00c200ff, 0x39000000, 0x00e200ff,
1943 0x41000000, 0x000008ff, 0x4a000000, 0x002008ff,
1944 0x52000000, 0x004008ff, 0x5a000000, 0x006108ff,
1945 0x62000000, 0x008108ff, 0x6a000000, 0x00a108ff,
1946 0x73000000, 0x00c208ff, 0x7b000000, 0x00e208ff,
1947 0x83000000, 0x000010ff, 0x8b000000, 0x002010ff,
1948 0x94000000, 0x004010ff, 0x9c000000, 0x006110ff,
1949 0xa4000000, 0x008110ff, 0xac000000, 0x00a110ff,
1950 0xb4000000, 0x00c210ff, 0xbd000000, 0x00e210ff,
1951 0xc5000000, 0x000018ff, 0xcd000000, 0x002018ff,
1952 0xd5000000, 0x004018ff, 0xde000000, 0x006118ff,
1953 0xe6000000, 0x008118ff, 0xee000000, 0x00a118ff,
1954 0xf6000000, 0x00c218ff, 0xff000000, 0x00e218ff,
1955 0x00040000, 0x000020ff, 0x08040000, 0x002020ff,
1956 0x10040000, 0x004020ff, 0x18040000, 0x006120ff,
1957 0x20040000, 0x008120ff, 0x29040000, 0x00a120ff,
1958 0x31040000, 0x00c220ff, 0x39040000, 0x00e220ff,
1959 0x41040000, 0x000029ff, 0x4a040000, 0x002029ff,
1960 0x52040000, 0x004029ff, 0x5a040000, 0x006129ff,
1961 0x62040000, 0x008129ff, 0x6a040000, 0x00a129ff,
1962 0x73040000, 0x00c229ff, 0x7b040000, 0x00e229ff,
1963 0x83040000, 0x000031ff, 0x8b040000, 0x002031ff,
1964 0x94040000, 0x004031ff, 0x9c040000, 0x006131ff,
1965 0xa4040000, 0x008131ff, 0xac040000, 0x00a131ff,
1966 0xb4040000, 0x00c231ff, 0xbd040000, 0x00e231ff,
1967 0xc5040000, 0x000039ff, 0xcd040000, 0x002039ff,
1968 0xd5040000, 0x004039ff, 0xde040000, 0x006139ff,
1969 0xe6040000, 0x008139ff, 0xee040000, 0x00a139ff,
1970 0xf6040000, 0x00c239ff, 0xff040000, 0x00e239ff,
1971 0x00080000, 0x000041ff, 0x08080000, 0x002041ff,
1972 0x10080000, 0x004041ff, 0x18080000, 0x006141ff,
1973 0x20080000, 0x008141ff, 0x29080000, 0x00a141ff,
1974 0x31080000, 0x00c241ff, 0x39080000, 0x00e241ff,
1975 0x41080000, 0x00004aff, 0x4a080000, 0x00204aff,
1976 0x52080000, 0x00404aff, 0x5a080000, 0x00614aff,
1977 0x62080000, 0x00814aff, 0x6a080000, 0x00a14aff,
1978 0x73080000, 0x00c24aff, 0x7b080000, 0x00e24aff,
1979 0x83080000, 0x000052ff, 0x8b080000, 0x002052ff,
1980 0x94080000, 0x004052ff, 0x9c080000, 0x006152ff,
1981 0xa4080000, 0x008152ff, 0xac080000, 0x00a152ff,
1982 0xb4080000, 0x00c252ff, 0xbd080000, 0x00e252ff,
1983 0xc5080000, 0x00005aff, 0xcd080000, 0x00205aff,
1984 0xd5080000, 0x00405aff, 0xde080000, 0x00615aff,
1985 0xe6080000, 0x00815aff, 0xee080000, 0x00a15aff,
1986 0xf6080000, 0x00c25aff, 0xff080000, 0x00e25aff,
1987 0x000c0000, 0x000062ff, 0x080c0000, 0x002062ff,
1988 0x100c0000, 0x004062ff, 0x180c0000, 0x006162ff,
1989 0x200c0000, 0x008162ff, 0x290c0000, 0x00a162ff,
1990 0x310c0000, 0x00c262ff, 0x390c0000, 0x00e262ff,
1991 0x410c0000, 0x00006aff, 0x4a0c0000, 0x00206aff,
1992 0x520c0000, 0x00406aff, 0x5a0c0000, 0x00616aff,
1993 0x620c0000, 0x00816aff, 0x6a0c0000, 0x00a16aff,
1994 0x730c0000, 0x00c26aff, 0x7b0c0000, 0x00e26aff,
1995 0x830c0000, 0x000073ff, 0x8b0c0000, 0x002073ff,
1996 0x940c0000, 0x004073ff, 0x9c0c0000, 0x006173ff,
1997 0xa40c0000, 0x008173ff, 0xac0c0000, 0x00a173ff,
1998 0xb40c0000, 0x00c273ff, 0xbd0c0000, 0x00e273ff,
1999 0xc50c0000, 0x00007bff, 0xcd0c0000, 0x00207bff,
2000 0xd50c0000, 0x00407bff, 0xde0c0000, 0x00617bff,
2001 0xe60c0000, 0x00817bff, 0xee0c0000, 0x00a17bff,
2002 0xf60c0000, 0x00c27bff, 0xff0c0000, 0x00e27bff,
2003 0x00100000, 0x000083ff, 0x08100000, 0x002083ff,
2004 0x10100000, 0x004083ff, 0x18100000, 0x006183ff,
2005 0x20100000, 0x008183ff, 0x29100000, 0x00a183ff,
2006 0x31100000, 0x00c283ff, 0x39100000, 0x00e283ff,
2007 0x41100000, 0x00008bff, 0x4a100000, 0x00208bff,
2008 0x52100000, 0x00408bff, 0x5a100000, 0x00618bff,
2009 0x62100000, 0x00818bff, 0x6a100000, 0x00a18bff,
2010 0x73100000, 0x00c28bff, 0x7b100000, 0x00e28bff,
2011 0x83100000, 0x000094ff, 0x8b100000, 0x002094ff,
2012 0x94100000, 0x004094ff, 0x9c100000, 0x006194ff,
2013 0xa4100000, 0x008194ff, 0xac100000, 0x00a194ff,
2014 0xb4100000, 0x00c294ff, 0xbd100000, 0x00e294ff,
2015 0xc5100000, 0x00009cff, 0xcd100000, 0x00209cff,
2016 0xd5100000, 0x00409cff, 0xde100000, 0x00619cff,
2017 0xe6100000, 0x00819cff, 0xee100000, 0x00a19cff,
2018 0xf6100000, 0x00c29cff, 0xff100000, 0x00e29cff,
2019 0x00140000, 0x0000a4ff, 0x08140000, 0x0020a4ff,
2020 0x10140000, 0x0040a4ff, 0x18140000, 0x0061a4ff,
2021 0x20140000, 0x0081a4ff, 0x29140000, 0x00a1a4ff,
2022 0x31140000, 0x00c2a4ff, 0x39140000, 0x00e2a4ff,
2023 0x41140000, 0x0000acff, 0x4a140000, 0x0020acff,
2024 0x52140000, 0x0040acff, 0x5a140000, 0x0061acff,
2025 0x62140000, 0x0081acff, 0x6a140000, 0x00a1acff,
2026 0x73140000, 0x00c2acff, 0x7b140000, 0x00e2acff,
2027 0x83140000, 0x0000b4ff, 0x8b140000, 0x0020b4ff,
2028 0x94140000, 0x0040b4ff, 0x9c140000, 0x0061b4ff,
2029 0xa4140000, 0x0081b4ff, 0xac140000, 0x00a1b4ff,
2030 0xb4140000, 0x00c2b4ff, 0xbd140000, 0x00e2b4ff,
2031 0xc5140000, 0x0000bdff, 0xcd140000, 0x0020bdff,
2032 0xd5140000, 0x0040bdff, 0xde140000, 0x0061bdff,
2033 0xe6140000, 0x0081bdff, 0xee140000, 0x00a1bdff,
2034 0xf6140000, 0x00c2bdff, 0xff140000, 0x00e2bdff,
2035 0x00180000, 0x0000c5ff, 0x08180000, 0x0020c5ff,
2036 0x10180000, 0x0040c5ff, 0x18180000, 0x0061c5ff,
2037 0x20180000, 0x0081c5ff, 0x29180000, 0x00a1c5ff,
2038 0x31180000, 0x00c2c5ff, 0x39180000, 0x00e2c5ff,
2039 0x41180000, 0x0000cdff, 0x4a180000, 0x0020cdff,
2040 0x52180000, 0x0040cdff, 0x5a180000, 0x0061cdff,
2041 0x62180000, 0x0081cdff, 0x6a180000, 0x00a1cdff,
2042 0x73180000, 0x00c2cdff, 0x7b180000, 0x00e2cdff,
2043 0x83180000, 0x0000d5ff, 0x8b180000, 0x0020d5ff,
2044 0x94180000, 0x0040d5ff, 0x9c180000, 0x0061d5ff,
2045 0xa4180000, 0x0081d5ff, 0xac180000, 0x00a1d5ff,
2046 0xb4180000, 0x00c2d5ff, 0xbd180000, 0x00e2d5ff,
2047 0xc5180000, 0x0000deff, 0xcd180000, 0x0020deff,
2048 0xd5180000, 0x0040deff, 0xde180000, 0x0061deff,
2049 0xe6180000, 0x0081deff, 0xee180000, 0x00a1deff,
2050 0xf6180000, 0x00c2deff, 0xff180000, 0x00e2deff,
2051 0x001c0000, 0x0000e6ff, 0x081c0000, 0x0020e6ff,
2052 0x101c0000, 0x0040e6ff, 0x181c0000, 0x0061e6ff,
2053 0x201c0000, 0x0081e6ff, 0x291c0000, 0x00a1e6ff,
2054 0x311c0000, 0x00c2e6ff, 0x391c0000, 0x00e2e6ff,
2055 0x411c0000, 0x0000eeff, 0x4a1c0000, 0x0020eeff,
2056 0x521c0000, 0x0040eeff, 0x5a1c0000, 0x0061eeff,
2057 0x621c0000, 0x0081eeff, 0x6a1c0000, 0x00a1eeff,
2058 0x731c0000, 0x00c2eeff, 0x7b1c0000, 0x00e2eeff,
2059 0x831c0000, 0x0000f6ff, 0x8b1c0000, 0x0020f6ff,
2060 0x941c0000, 0x0040f6ff, 0x9c1c0000, 0x0061f6ff,
2061 0xa41c0000, 0x0081f6ff, 0xac1c0000, 0x00a1f6ff,
2062 0xb41c0000, 0x00c2f6ff, 0xbd1c0000, 0x00e2f6ff,
2063 0xc51c0000, 0x0000ffff, 0xcd1c0000, 0x0020ffff,
2064 0xd51c0000, 0x0040ffff, 0xde1c0000, 0x0061ffff,
2065 0xe61c0000, 0x0081ffff, 0xee1c0000, 0x00a1ffff,
2066 0xf61c0000, 0x00c2ffff, 0xff1c0000, 0x00e2ffff
2080 #ifndef USE_DUFFS_LOOP
2087 int srcskip, dstskip;
2106 #ifdef USE_DUFFS_LOOP
2113 *
dst = ((sR>>5)<<(3+2))|
2126 *
dst = ((sR >> 5) << (3 + 2)) |
2127 ((sG >> 5) << (2)) | ((sB >> 6) << (0));
2138 #ifdef USE_DUFFS_LOOP
2145 *
dst =
map[((sR>>5)<<(3+2))|
2158 *
dst =
map[((sR >> 5) << (3 + 2)) |
2159 ((sG >> 5) << (2)) | ((sB >> 6) << (0))];
2184 if (dstfmt->
Amask) {
2251 int *_p0 ,
int *_p1,
int *_p2,
int *_p3,
int *_alpha_channel)
2253 int alpha_channel = 0, p0, p1, p2, p3;
2254 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
2255 int Pixel = 0x04030201;
2257 int Pixel = 0x01020304;
2262 if (srcfmt->
Amask) {
2269 if (dstfmt->
Amask) {
2270 if (srcfmt->
Amask) {
2279 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
2281 p1 = (Pixel >> 8) & 0xFF;
2282 p2 = (Pixel >> 16) & 0xFF;
2283 p3 = (Pixel >> 24) & 0xFF;
2286 p2 = (Pixel >> 8) & 0xFF;
2287 p1 = (Pixel >> 16) & 0xFF;
2288 p0 = (Pixel >> 24) & 0xFF;
2294 }
else if (p1 == 0) {
2297 }
else if (p2 == 0) {
2300 }
else if (p3 == 0) {
2305 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
2307 if (srcbpp == 3 && dstbpp == 4) {
2312 }
else if (srcbpp == 4 && dstbpp == 3) {
2323 if (_alpha_channel) {
2324 *_alpha_channel = alpha_channel;
2344 #if HAVE_FAST_WRITE_INT8
2346 if (srcbpp == 4 && dstbpp == 4 &&
2351 int alpha_channel, p0, p1, p2, p3;
2375 if (srcbpp == 4 && dstbpp == 3 &&
2399 #if HAVE_FAST_WRITE_INT8
2401 if (srcbpp == 3 && dstbpp == 4 &&
2405 int alpha_channel, p0, p1, p2, p3;
2463 #if HAVE_FAST_WRITE_INT8
2465 if (srcbpp == 4 && dstbpp == 4 &&
2495 unsigned sR, sG, sB, sA;
2521 unsigned sR, sG, sB;
2527 if (palmap ==
NULL) {
2534 if ( (Pixel & rgbmask) != ckey ) {
2555 if ( (Pixel & rgbmask) != ckey ) {
2557 *
dst = (
Uint8)palmap[((sR>>5)<<(3+2))|
2593 if ( (*srcp & rgbmask) != ckey ) {
2622 int sfmt = srcfmt->
format;
2623 int dfmt = dstfmt->
format;
2633 if (dstfmt->
Amask) {
2640 if ((*src32 & rgbmask) != ckey) {
2641 *dst32 = *src32 |
mask;
2658 if ((*src32 & rgbmask) != ckey) {
2659 *dst32 = *src32 &
mask;
2672 #if HAVE_FAST_WRITE_INT8
2674 if (srcbpp == 4 && dstbpp == 4 &&
2679 int alpha_channel, p0, p1, p2, p3;
2688 if ((*src32 & rgbmask) != ckey) {
2710 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
2711 Uint8 k0 = ckey & 0xFF;
2712 Uint8 k1 = (ckey >> 8) & 0xFF;
2713 Uint8 k2 = (ckey >> 16) & 0xFF;
2715 Uint8 k0 = (ckey >> 16) & 0xFF;
2716 Uint8 k1 = (ckey >> 8) & 0xFF;
2717 Uint8 k2 = ckey & 0xFF;
2728 if (k0 !=
s0 || k1 !=
s1 || k2 != s2) {
2748 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
2749 Uint8 k0 = ckey & 0xFF;
2750 Uint8 k1 = (ckey >> 8) & 0xFF;
2751 Uint8 k2 = (ckey >> 16) & 0xFF;
2753 Uint8 k0 = (ckey >> 16) & 0xFF;
2754 Uint8 k1 = (ckey >> 8) & 0xFF;
2755 Uint8 k2 = ckey & 0xFF;
2765 if (k0 !=
s0 || k1 !=
s1 || k2 != s2) {
2783 if (srcbpp == 4 && dstbpp == 3 &&
2795 if ((*src32 & rgbmask) != ckey) {
2810 #if HAVE_FAST_WRITE_INT8
2812 if (srcbpp == 3 && dstbpp == 4 &&
2815 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
2816 Uint8 k0 = ckey & 0xFF;
2817 Uint8 k1 = (ckey >> 8) & 0xFF;
2818 Uint8 k2 = (ckey >> 16) & 0xFF;
2820 Uint8 k0 = (ckey >> 16) & 0xFF;
2821 Uint8 k1 = (ckey >> 8) & 0xFF;
2822 Uint8 k2 = ckey & 0xFF;
2826 int alpha_channel, p0, p1, p2, p3;
2837 if (k0 !=
s0 || k1 !=
s1 || k2 != s2) {
2864 if ( (Pixel & rgbmask) != ckey ) {
2895 unsigned sR, sG, sB, sA;
2916 if ((*src32 & rgbmask) != ckey) {
2931 #if HAVE_FAST_WRITE_INT8
2933 if (srcbpp == 4 && dstbpp == 4 &&
2946 if ((*src32 & rgbmask) != ckey) {
2968 if ( (Pixel & rgbmask) != ckey ) {
2994 unsigned sR, sG, sB, sA;
3026 unsigned sR, sG, sB, sA;
3060 if (dstfmt->
Amask) {
3063 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
3064 int i0 = 0,
i1 = 1,
i2 = 2;
3066 int i0 = srcbpp - 1 - 0;
3067 int i1 = srcbpp - 1 - 1;
3068 int i2 = srcbpp - 1 - 2;
3078 *dst32 = (
s0) | (
s1 << 8) | (s2 << 16) |
mask;
3088 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
3089 int i0 = 0,
i1 = 1,
i2 = 2;
3090 int j0 = 0,
j1 = 1, j2 = 2;
3092 int i0 = srcbpp - 1 - 0;
3093 int i1 = srcbpp - 1 - 1;
3094 int i2 = srcbpp - 1 - 2;
3095 int j0 = dstbpp - 1 - 0;
3096 int j1 = dstbpp - 1 - 1;
3097 int j2 = dstbpp - 1 - 2;
3134 if (dstfmt->
Amask) {
3135 if (srcfmt->
Amask) {
3139 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
3140 int i0 = 0,
i1 = 1,
i2 = 2, i3 = 3;
3142 int i0 = 3,
i1 = 2,
i2 = 1, i3 = 0;
3153 *dst32 = (
s0 << 16) | (
s1 << 8) | (s2) | alphashift;
3164 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
3165 int i0 = 0,
i1 = 1,
i2 = 2;
3167 int i0 = srcbpp - 1 - 0;
3168 int i1 = srcbpp - 1 - 1;
3169 int i2 = srcbpp - 1 - 2;
3180 *dst32 = (
s0 << 16) | (
s1 << 8) | (s2) |
mask;
3191 #if SDL_BYTEORDER == SDL_LIL_ENDIAN
3192 int i0 = 0,
i1 = 1,
i2 = 2;
3193 int j0 = 2,
j1 = 1, j2 = 0;
3195 int i0 = srcbpp - 1 - 0;
3196 int i1 = srcbpp - 1 - 1;
3197 int i2 = srcbpp - 1 - 2;
3198 int j0 = dstbpp - 1 - 2;
3199 int j1 = dstbpp - 1 - 1;
3200 int j2 = dstbpp - 1 - 0;
3226 #define COPY_ALPHA 4
3238 {0, 0, 0, 0, 0, 0, 0, 0,
BlitNtoN, 0}
3242 #if SDL_ALTIVEC_BLITTERS
3244 {0x0000F800, 0x000007E0, 0x0000001F, 4, 0x00000000, 0x00000000, 0x00000000,
3246 {0x00007C00, 0x000003E0, 0x0000001F, 4, 0x00000000, 0x00000000, 0x00000000,
3249 #if SDL_ARM_SIMD_BLITTERS
3250 {0x00000F00, 0x000000F0, 0x0000000F, 4, 0x00FF0000, 0x0000FF00, 0x000000FF,
3253 #if SDL_HAVE_BLIT_N_RGB565
3254 {0x0000F800, 0x000007E0, 0x0000001F, 4, 0x00FF0000, 0x0000FF00, 0x000000FF,
3256 {0x0000F800, 0x000007E0, 0x0000001F, 4, 0x000000FF, 0x0000FF00, 0x00FF0000,
3258 {0x0000F800, 0x000007E0, 0x0000001F, 4, 0xFF000000, 0x00FF0000, 0x0000FF00,
3260 {0x0000F800, 0x000007E0, 0x0000001F, 4, 0x0000FF00, 0x00FF0000, 0xFF000000,
3265 {0, 0, 0, 0, 0, 0, 0, 0,
BlitNtoN, 0}
3270 {0x000000FF, 0x0000FF00, 0x00FF0000, 4, 0x000000FF, 0x0000FF00, 0x00FF0000,
3272 #if HAVE_FAST_WRITE_INT8
3276 {0x00FF0000, 0x0000FF00, 0x000000FF, 4, 0x00FF0000, 0x0000FF00, 0x000000FF,
3278 #if HAVE_FAST_WRITE_INT8
3283 {0x000000FF, 0x0000FF00, 0x00FF0000, 4, 0x00FF0000, 0x0000FF00, 0x000000FF,
3285 #if HAVE_FAST_WRITE_INT8
3289 {0x00FF0000, 0x0000FF00, 0x000000FF, 4, 0x000000FF, 0x0000FF00, 0x00FF0000,
3291 #if HAVE_FAST_WRITE_INT8
3296 {0x000000FF, 0x0000FF00, 0x00FF0000, 3, 0x00FF0000, 0x0000FF00, 0x000000FF,
3298 {0x00FF0000, 0x0000FF00, 0x000000FF, 3, 0x000000FF, 0x0000FF00, 0x00FF0000,
3301 {0, 0, 0, 0, 0, 0, 0, 0,
BlitNtoN, 0}
3305 #if SDL_ALTIVEC_BLITTERS
3307 {0x00000000, 0x00000000, 0x00000000, 4, 0x00000000, 0x00000000, 0x00000000,
3310 {0x00000000, 0x00000000, 0x00000000, 4, 0x00000000, 0x00000000, 0x00000000,
3313 {0x00000000, 0x00000000, 0x00000000, 2, 0x0000F800, 0x000007E0, 0x0000001F,
3316 #if SDL_ARM_SIMD_BLITTERS
3317 {0x000000FF, 0x0000FF00, 0x00FF0000, 4, 0x00FF0000, 0x0000FF00, 0x000000FF,
3321 {0x000000FF, 0x0000FF00, 0x00FF0000, 3, 0x000000FF, 0x0000FF00, 0x00FF0000,
3323 {0x00FF0000, 0x0000FF00, 0x000000FF, 3, 0x00FF0000, 0x0000FF00, 0x000000FF,
3326 {0x000000FF, 0x0000FF00, 0x00FF0000, 3, 0x00FF0000, 0x0000FF00, 0x000000FF,
3328 {0x00FF0000, 0x0000FF00, 0x000000FF, 3, 0x000000FF, 0x0000FF00, 0x00FF0000,
3331 {0x000000FF, 0x0000FF00, 0x00FF0000, 4, 0x00FF0000, 0x0000FF00, 0x000000FF,
3333 #if HAVE_FAST_WRITE_INT8
3337 {0x00FF0000, 0x0000FF00, 0x000000FF, 4, 0x000000FF, 0x0000FF00, 0x00FF0000,
3339 #if HAVE_FAST_WRITE_INT8
3344 {0x00FF0000, 0x0000FF00, 0x000000FF, 2, 0x0000F800, 0x000007E0, 0x0000001F,
3346 {0x00FF0000, 0x0000FF00, 0x000000FF, 2, 0x00007C00, 0x000003E0, 0x0000001F,
3349 {0, 0, 0, 0, 0, 0, 0, 0,
BlitNtoN, 0}
3357 #define MASKOK(x, y) (((x) == (y)) || ((y) == 0x00000000))
3382 (srcfmt->
Rmask == 0x00FF0000) &&
3383 (srcfmt->
Gmask == 0x0000FF00) &&
3384 (srcfmt->
Bmask == 0x000000FF)) {
3387 (srcfmt->
Rmask == 0x3FF00000) &&
3388 (srcfmt->
Gmask == 0x000FFC00) &&
3389 (srcfmt->
Bmask == 0x000003FF)) {
3400 for (which = 0;
table[which].dstbpp; ++which) {
3408 (a_need &
table[which].alpha) == a_need &&
3453 #if SDL_ALTIVEC_BLITTERS
3456 return Blit32to32KeyAltivec;