@@ -26,46 +26,60 @@
 
 #if HAVE_XFORM_ASM
 
-#define AV_RL16 AV_RL16
-static av_always_inline uint16_t AV_RL16(const void *p)
+#if HAVE_BIGENDIAN
+#define AV_RL16 AV_RX16
+#define AV_WL16 AV_WX16
+#define AV_RL32 AV_RX32
+#define AV_WL32 AV_WX32
+#else
+#define AV_RB16 AV_RX16
+#define AV_WB16 AV_WX16
+#define AV_RB32 AV_RX32
+#define AV_WB32 AV_WX32
+#endif
+
+static av_always_inline uint16_t AV_RX16(const void *p)
 {
     uint16_t v;
     __asm__ ("lhbrx %0, %y1" : "=r"(v) : "Z"(*(const uint16_t*)p));
     return v;
 }
 
-#define AV_WL16 AV_WL16
-static av_always_inline void AV_WL16(void *p, uint16_t v)
+static av_always_inline void AV_WX16(void *p, uint16_t v)
 {
     __asm__ ("sthbrx %1, %y0" : "=Z"(*(uint16_t*)p) : "r"(v));
 }
 
-#define AV_RL32 AV_RL32
-static av_always_inline uint32_t AV_RL32(const void *p)
+static av_always_inline uint32_t AV_RX32(const void *p)
 {
     uint32_t v;
     __asm__ ("lwbrx %0, %y1" : "=r"(v) : "Z"(*(const uint32_t*)p));
     return v;
 }
 
-#define AV_WL32 AV_WL32
-static av_always_inline void AV_WL32(void *p, uint32_t v)
+static av_always_inline void AV_WX32(void *p, uint32_t v)
 {
     __asm__ ("stwbrx %1, %y0" : "=Z"(*(uint32_t*)p) : "r"(v));
 }
 
 #if HAVE_LDBRX
 
-#define AV_RL64 AV_RL64
-static av_always_inline uint64_t AV_RL64(const void *p)
+#if HAVE_BIGENDIAN
+#define AV_RL64 AV_RX64
+#define AV_WL64 AV_WX64
+#else
+#define AV_RB64 AV_RX64
+#define AV_WB64 AV_WX64
+#endif
+
+static av_always_inline uint64_t AV_RX64(const void *p)
 {
     uint64_t v;
     __asm__ ("ldbrx %0, %y1" : "=r"(v) : "Z"(*(const uint64_t*)p));
     return v;
 }
 
-#define AV_WL64 AV_WL64
-static av_always_inline void AV_WL64(void *p, uint64_t v)
+static av_always_inline void AV_WX64(void *p, uint64_t v)
 {
     __asm__ ("stdbrx %1, %y0" : "=Z"(*(uint64_t*)p) : "r"(v));
 }
@@ -102,7 +116,9 @@ static av_always_inline void AV_WL64(void *p, uint64_t v)
  * default, so we override it here.
  */
 
+#if HAVE_BIGENDIAN
 #define AV_RB64(p) (*(const uint64_t *)(p))
 #define AV_WB64(p, v) (*(uint64_t *)(p) = (v))
+#endif
 
 #endif /* AVUTIL_PPC_INTREADWRITE_H */
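For readers without PowerPC hardware at hand, here is a minimal, host-portable sketch of the semantics the lwbrx-based AV_RX32 helper above provides; the rx32 name and the surrounding program are invented for illustration and are not part of the patch or of libavutil. AV_RX32 always performs a byte-reversed load relative to host order, which is why the patch maps it to AV_RL32 on big-endian builds and to AV_RB32 on little-endian builds.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Portable stand-in for the lwbrx-backed AV_RX32: load 32 bits from p,
 * then reverse the byte order relative to the host's native order. */
static uint32_t rx32(const void *p)
{
    uint32_t v;
    memcpy(&v, p, sizeof v);              /* native-order load */
    return ((v & 0x000000FFu) << 24) |
           ((v & 0x0000FF00u) <<  8) |
           ((v & 0x00FF0000u) >>  8) |
           ((v & 0xFF000000u) >> 24);     /* byte swap */
}

int main(void)
{
    const uint8_t buf[4] = { 0x11, 0x22, 0x33, 0x44 };
    /* Big-endian host:    prints 0x44332211, i.e. the AV_RL32 reading of buf.
     * Little-endian host: prints 0x11223344, i.e. the AV_RB32 reading of buf. */
    printf("0x%08" PRIX32 "\n", rx32(buf));
    return 0;
}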