Commit c3fec9b5 authored by Sebastian Lackner's avatar Sebastian Lackner Committed by Alexandre Julliard

vcomp: Implement 8-bit atomic instructions.

parent b86ed6fe
...@@ -240,6 +240,14 @@ static void CDECL _vcomp_fork_call_wrapper(void *wrapper, int nargs, __ms_va_lis ...@@ -240,6 +240,14 @@ static void CDECL _vcomp_fork_call_wrapper(void *wrapper, int nargs, __ms_va_lis
#if defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__)) #if defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))
/* Atomic 8-bit compare-and-swap: if *dest == compare, store xchg into *dest.
 * Always returns the previous value of *dest (x86 `lock cmpxchg` semantics). */
static inline char interlocked_cmpxchg8(char *dest, char xchg, char compare)
{
char ret;
/* "0" (compare) places the expected value in %al on entry; cmpxchgb leaves
 * the old *dest value in %al, which "=a" (ret) picks up.  "q" restricts xchg
 * to a byte-addressable register on i386; "memory" orders surrounding accesses. */
__asm__ __volatile__( "lock; cmpxchgb %2,(%1)"
: "=a" (ret) : "r" (dest), "q" (xchg), "0" (compare) : "memory" );
return ret;
}
static inline short interlocked_cmpxchg16(short *dest, short xchg, short compare) static inline short interlocked_cmpxchg16(short *dest, short xchg, short compare)
{ {
short ret; short ret;
...@@ -248,6 +256,14 @@ static inline short interlocked_cmpxchg16(short *dest, short xchg, short compare ...@@ -248,6 +256,14 @@ static inline short interlocked_cmpxchg16(short *dest, short xchg, short compare
return ret; return ret;
} }
/* Atomic 8-bit fetch-and-add: adds incr to *dest and returns the value
 * *dest held before the addition (x86 `lock xadd` semantics). */
static inline char interlocked_xchg_add8(char *dest, char incr)
{
char ret;
/* xaddb swaps the operand with *dest while adding, so the old value comes
 * back in the same register ("0" (incr) in, "=q" (ret) out). */
__asm__ __volatile__( "lock; xaddb %0,(%1)"
: "=q" (ret) : "r" (dest), "0" (incr) : "memory" );
return ret;
}
static inline short interlocked_xchg_add16(short *dest, short incr) static inline short interlocked_xchg_add16(short *dest, short incr)
{ {
short ret; short ret;
...@@ -258,6 +274,35 @@ static inline short interlocked_xchg_add16(short *dest, short incr) ...@@ -258,6 +274,35 @@ static inline short interlocked_xchg_add16(short *dest, short incr)
#else /* __GNUC__ */ #else /* __GNUC__ */
#ifdef __GCC_HAVE_SYNC_COMPARE_AND_SWAP_1
/* Atomic 8-bit compare-and-swap built on the GCC __sync intrinsics.
 * Stores xchg into *dest iff *dest == compare; returns the prior value. */
static inline char interlocked_cmpxchg8(char *dest, char xchg, char compare)
{
    char previous = __sync_val_compare_and_swap(dest, compare, xchg);
    return previous;
}
/* Atomic 8-bit fetch-and-add built on the GCC __sync intrinsics.
 * Adds incr to *dest; returns the value *dest held beforehand. */
static inline char interlocked_xchg_add8(char *dest, char incr)
{
    char previous = __sync_fetch_and_add(dest, incr);
    return previous;
}
#else
/* Fallback 8-bit compare-and-swap for compilers without __sync builtins:
 * serialized through the process-wide vcomp critical section.
 * Returns the value *dest held before the (possible) exchange. */
static char interlocked_cmpxchg8(char *dest, char xchg, char compare)
{
    char old;
    EnterCriticalSection(&vcomp_section);
    old = *dest;
    if (old == compare)
        *dest = xchg;
    LeaveCriticalSection(&vcomp_section);
    return old;
}
/* Fallback 8-bit fetch-and-add, serialized through the process-wide vcomp
 * critical section.  Returns the value *dest held before the addition. */
static char interlocked_xchg_add8(char *dest, char incr)
{
    char old;
    EnterCriticalSection(&vcomp_section);
    old = *dest;
    *dest = old + incr;
    LeaveCriticalSection(&vcomp_section);
    return old;
}
#endif
#ifdef __GCC_HAVE_SYNC_COMPARE_AND_SWAP_2 #ifdef __GCC_HAVE_SYNC_COMPARE_AND_SWAP_2
static inline short interlocked_cmpxchg16(short *dest, short xchg, short compare) static inline short interlocked_cmpxchg16(short *dest, short xchg, short compare)
{ {
...@@ -343,6 +388,70 @@ static void vcomp_free_thread_data(void) ...@@ -343,6 +388,70 @@ static void vcomp_free_thread_data(void)
vcomp_set_thread_data(NULL); vcomp_set_thread_data(NULL);
} }
/* OpenMP atomic: *dest += val for signed 8-bit operands. */
void CDECL _vcomp_atomic_add_i1(char *dest, char val)
{
    /* xadd performs the addition atomically; the returned old value is unused. */
    (void)interlocked_xchg_add8(dest, val);
}
/* OpenMP atomic: *dest &= val for signed 8-bit operands (CAS retry loop). */
void CDECL _vcomp_atomic_and_i1(char *dest, char val)
{
    char expected;
    do
    {
        expected = *dest;
    }
    while (interlocked_cmpxchg8(dest, expected & val, expected) != expected);
}
/* OpenMP atomic: *dest /= val for signed 8-bit operands (CAS retry loop). */
void CDECL _vcomp_atomic_div_i1(char *dest, char val)
{
    char expected;
    do
    {
        expected = *dest;
    }
    while (interlocked_cmpxchg8(dest, expected / val, expected) != expected);
}
/* OpenMP atomic: *dest /= val for unsigned 8-bit operands (CAS retry loop).
 * The cmpxchg helper works on char; casts keep the comparison unsigned. */
void CDECL _vcomp_atomic_div_ui1(unsigned char *dest, unsigned char val)
{
    unsigned char expected;
    do
    {
        expected = *dest;
    }
    while ((unsigned char)interlocked_cmpxchg8((char *)dest, expected / val, expected) != expected);
}
/* OpenMP atomic: *dest *= val for signed 8-bit operands (CAS retry loop). */
void CDECL _vcomp_atomic_mul_i1(char *dest, char val)
{
    char expected;
    do
    {
        expected = *dest;
    }
    while (interlocked_cmpxchg8(dest, expected * val, expected) != expected);
}
/* OpenMP atomic: *dest |= val for signed 8-bit operands (CAS retry loop). */
void CDECL _vcomp_atomic_or_i1(char *dest, char val)
{
    char expected;
    do
    {
        expected = *dest;
    }
    while (interlocked_cmpxchg8(dest, expected | val, expected) != expected);
}
/* OpenMP atomic: *dest <<= val for signed 8-bit operands (CAS retry loop). */
void CDECL _vcomp_atomic_shl_i1(char *dest, unsigned int val)
{
    char expected;
    do
    {
        expected = *dest;
    }
    while (interlocked_cmpxchg8(dest, expected << val, expected) != expected);
}
/* OpenMP atomic: *dest >>= val for signed 8-bit operands (CAS retry loop). */
void CDECL _vcomp_atomic_shr_i1(char *dest, unsigned int val)
{
    char expected;
    do
    {
        expected = *dest;
    }
    while (interlocked_cmpxchg8(dest, expected >> val, expected) != expected);
}
/* OpenMP atomic: *dest >>= val for unsigned 8-bit operands (CAS retry loop).
 * Operating on unsigned char keeps the shift logical rather than arithmetic. */
void CDECL _vcomp_atomic_shr_ui1(unsigned char *dest, unsigned int val)
{
    unsigned char expected;
    do
    {
        expected = *dest;
    }
    while ((unsigned char)interlocked_cmpxchg8((char *)dest, expected >> val, expected) != expected);
}
/* OpenMP atomic: *dest -= val for signed 8-bit operands. */
void CDECL _vcomp_atomic_sub_i1(char *dest, char val)
{
    /* Subtraction is addition of the negated value; old value is unused. */
    (void)interlocked_xchg_add8(dest, (char)-val);
}
/* OpenMP atomic: *dest ^= val for signed 8-bit operands (CAS retry loop). */
void CDECL _vcomp_atomic_xor_i1(char *dest, char val)
{
    char expected;
    do
    {
        expected = *dest;
    }
    while (interlocked_cmpxchg8(dest, expected ^ val, expected) != expected);
}
void CDECL _vcomp_atomic_add_i2(short *dest, short val) void CDECL _vcomp_atomic_add_i2(short *dest, short val)
{ {
interlocked_xchg_add16(dest, val); interlocked_xchg_add16(dest, val);
......
@ stub _vcomp_atomic_add_i1 @ cdecl _vcomp_atomic_add_i1(ptr long)
@ cdecl _vcomp_atomic_add_i2(ptr long) @ cdecl _vcomp_atomic_add_i2(ptr long)
@ cdecl _vcomp_atomic_add_i4(ptr long) @ cdecl _vcomp_atomic_add_i4(ptr long)
@ cdecl _vcomp_atomic_add_i8(ptr int64) @ cdecl _vcomp_atomic_add_i8(ptr int64)
@ cdecl _vcomp_atomic_add_r4(ptr float) @ cdecl _vcomp_atomic_add_r4(ptr float)
@ cdecl _vcomp_atomic_add_r8(ptr double) @ cdecl _vcomp_atomic_add_r8(ptr double)
@ stub _vcomp_atomic_and_i1 @ cdecl _vcomp_atomic_and_i1(ptr long)
@ cdecl _vcomp_atomic_and_i2(ptr long) @ cdecl _vcomp_atomic_and_i2(ptr long)
@ cdecl _vcomp_atomic_and_i4(ptr long) @ cdecl _vcomp_atomic_and_i4(ptr long)
@ cdecl _vcomp_atomic_and_i8(ptr int64) @ cdecl _vcomp_atomic_and_i8(ptr int64)
@ stub _vcomp_atomic_div_i1 @ cdecl _vcomp_atomic_div_i1(ptr long)
@ cdecl _vcomp_atomic_div_i2(ptr long) @ cdecl _vcomp_atomic_div_i2(ptr long)
@ cdecl _vcomp_atomic_div_i4(ptr long) @ cdecl _vcomp_atomic_div_i4(ptr long)
@ cdecl _vcomp_atomic_div_i8(ptr int64) @ cdecl _vcomp_atomic_div_i8(ptr int64)
@ cdecl _vcomp_atomic_div_r4(ptr float) @ cdecl _vcomp_atomic_div_r4(ptr float)
@ cdecl _vcomp_atomic_div_r8(ptr double) @ cdecl _vcomp_atomic_div_r8(ptr double)
@ stub _vcomp_atomic_div_ui1 @ cdecl _vcomp_atomic_div_ui1(ptr long)
@ cdecl _vcomp_atomic_div_ui2(ptr long) @ cdecl _vcomp_atomic_div_ui2(ptr long)
@ cdecl _vcomp_atomic_div_ui4(ptr long) @ cdecl _vcomp_atomic_div_ui4(ptr long)
@ cdecl _vcomp_atomic_div_ui8(ptr int64) @ cdecl _vcomp_atomic_div_ui8(ptr int64)
@ stub _vcomp_atomic_mul_i1 @ cdecl _vcomp_atomic_mul_i1(ptr long)
@ cdecl _vcomp_atomic_mul_i2(ptr long) @ cdecl _vcomp_atomic_mul_i2(ptr long)
@ cdecl _vcomp_atomic_mul_i4(ptr long) @ cdecl _vcomp_atomic_mul_i4(ptr long)
@ cdecl _vcomp_atomic_mul_i8(ptr int64) @ cdecl _vcomp_atomic_mul_i8(ptr int64)
@ cdecl _vcomp_atomic_mul_r4(ptr float) @ cdecl _vcomp_atomic_mul_r4(ptr float)
@ cdecl _vcomp_atomic_mul_r8(ptr double) @ cdecl _vcomp_atomic_mul_r8(ptr double)
@ stub _vcomp_atomic_or_i1 @ cdecl _vcomp_atomic_or_i1(ptr long)
@ cdecl _vcomp_atomic_or_i2(ptr long) @ cdecl _vcomp_atomic_or_i2(ptr long)
@ cdecl _vcomp_atomic_or_i4(ptr long) @ cdecl _vcomp_atomic_or_i4(ptr long)
@ cdecl _vcomp_atomic_or_i8(ptr int64) @ cdecl _vcomp_atomic_or_i8(ptr int64)
@ stub _vcomp_atomic_shl_i1 @ cdecl _vcomp_atomic_shl_i1(ptr long)
@ cdecl _vcomp_atomic_shl_i2(ptr long) @ cdecl _vcomp_atomic_shl_i2(ptr long)
@ cdecl _vcomp_atomic_shl_i4(ptr long) @ cdecl _vcomp_atomic_shl_i4(ptr long)
@ cdecl _vcomp_atomic_shl_i8(ptr long) @ cdecl _vcomp_atomic_shl_i8(ptr long)
@ stub _vcomp_atomic_shr_i1 @ cdecl _vcomp_atomic_shr_i1(ptr long)
@ cdecl _vcomp_atomic_shr_i2(ptr long) @ cdecl _vcomp_atomic_shr_i2(ptr long)
@ cdecl _vcomp_atomic_shr_i4(ptr long) @ cdecl _vcomp_atomic_shr_i4(ptr long)
@ cdecl _vcomp_atomic_shr_i8(ptr long) @ cdecl _vcomp_atomic_shr_i8(ptr long)
@ stub _vcomp_atomic_shr_ui1 @ cdecl _vcomp_atomic_shr_ui1(ptr long)
@ cdecl _vcomp_atomic_shr_ui2(ptr long) @ cdecl _vcomp_atomic_shr_ui2(ptr long)
@ cdecl _vcomp_atomic_shr_ui4(ptr long) @ cdecl _vcomp_atomic_shr_ui4(ptr long)
@ cdecl _vcomp_atomic_shr_ui8(ptr long) @ cdecl _vcomp_atomic_shr_ui8(ptr long)
@ stub _vcomp_atomic_sub_i1 @ cdecl _vcomp_atomic_sub_i1(ptr long)
@ cdecl _vcomp_atomic_sub_i2(ptr long) @ cdecl _vcomp_atomic_sub_i2(ptr long)
@ cdecl _vcomp_atomic_sub_i4(ptr long) @ cdecl _vcomp_atomic_sub_i4(ptr long)
@ cdecl _vcomp_atomic_sub_i8(ptr int64) @ cdecl _vcomp_atomic_sub_i8(ptr int64)
@ cdecl _vcomp_atomic_sub_r4(ptr float) @ cdecl _vcomp_atomic_sub_r4(ptr float)
@ cdecl _vcomp_atomic_sub_r8(ptr double) @ cdecl _vcomp_atomic_sub_r8(ptr double)
@ stub _vcomp_atomic_xor_i1 @ cdecl _vcomp_atomic_xor_i1(ptr long)
@ cdecl _vcomp_atomic_xor_i2(ptr long) @ cdecl _vcomp_atomic_xor_i2(ptr long)
@ cdecl _vcomp_atomic_xor_i4(ptr long) @ cdecl _vcomp_atomic_xor_i4(ptr long)
@ cdecl _vcomp_atomic_xor_i8(ptr int64) @ cdecl _vcomp_atomic_xor_i8(ptr int64)
......
@ stub _vcomp_atomic_add_i1 @ cdecl _vcomp_atomic_add_i1(ptr long) vcomp._vcomp_atomic_add_i1
@ cdecl _vcomp_atomic_add_i2(ptr long) vcomp._vcomp_atomic_add_i2 @ cdecl _vcomp_atomic_add_i2(ptr long) vcomp._vcomp_atomic_add_i2
@ cdecl _vcomp_atomic_add_i4(ptr long) vcomp._vcomp_atomic_add_i4 @ cdecl _vcomp_atomic_add_i4(ptr long) vcomp._vcomp_atomic_add_i4
@ cdecl _vcomp_atomic_add_i8(ptr int64) vcomp._vcomp_atomic_add_i8 @ cdecl _vcomp_atomic_add_i8(ptr int64) vcomp._vcomp_atomic_add_i8
@ cdecl _vcomp_atomic_add_r4(ptr float) vcomp._vcomp_atomic_add_r4 @ cdecl _vcomp_atomic_add_r4(ptr float) vcomp._vcomp_atomic_add_r4
@ cdecl _vcomp_atomic_add_r8(ptr double) vcomp._vcomp_atomic_add_r8 @ cdecl _vcomp_atomic_add_r8(ptr double) vcomp._vcomp_atomic_add_r8
@ stub _vcomp_atomic_and_i1 @ cdecl _vcomp_atomic_and_i1(ptr long) vcomp._vcomp_atomic_and_i1
@ cdecl _vcomp_atomic_and_i2(ptr long) vcomp._vcomp_atomic_and_i2 @ cdecl _vcomp_atomic_and_i2(ptr long) vcomp._vcomp_atomic_and_i2
@ cdecl _vcomp_atomic_and_i4(ptr long) vcomp._vcomp_atomic_and_i4 @ cdecl _vcomp_atomic_and_i4(ptr long) vcomp._vcomp_atomic_and_i4
@ cdecl _vcomp_atomic_and_i8(ptr int64) vcomp._vcomp_atomic_and_i8 @ cdecl _vcomp_atomic_and_i8(ptr int64) vcomp._vcomp_atomic_and_i8
@ stub _vcomp_atomic_div_i1 @ cdecl _vcomp_atomic_div_i1(ptr long) vcomp._vcomp_atomic_div_i1
@ cdecl _vcomp_atomic_div_i2(ptr long) vcomp._vcomp_atomic_div_i2 @ cdecl _vcomp_atomic_div_i2(ptr long) vcomp._vcomp_atomic_div_i2
@ cdecl _vcomp_atomic_div_i4(ptr long) vcomp._vcomp_atomic_div_i4 @ cdecl _vcomp_atomic_div_i4(ptr long) vcomp._vcomp_atomic_div_i4
@ cdecl _vcomp_atomic_div_i8(ptr int64) vcomp._vcomp_atomic_div_i8 @ cdecl _vcomp_atomic_div_i8(ptr int64) vcomp._vcomp_atomic_div_i8
@ cdecl _vcomp_atomic_div_r4(ptr float) vcomp._vcomp_atomic_div_r4 @ cdecl _vcomp_atomic_div_r4(ptr float) vcomp._vcomp_atomic_div_r4
@ cdecl _vcomp_atomic_div_r8(ptr double) vcomp._vcomp_atomic_div_r8 @ cdecl _vcomp_atomic_div_r8(ptr double) vcomp._vcomp_atomic_div_r8
@ stub _vcomp_atomic_div_ui1 @ cdecl _vcomp_atomic_div_ui1(ptr long) vcomp._vcomp_atomic_div_ui1
@ cdecl _vcomp_atomic_div_ui2(ptr long) vcomp._vcomp_atomic_div_ui2 @ cdecl _vcomp_atomic_div_ui2(ptr long) vcomp._vcomp_atomic_div_ui2
@ cdecl _vcomp_atomic_div_ui4(ptr long) vcomp._vcomp_atomic_div_ui4 @ cdecl _vcomp_atomic_div_ui4(ptr long) vcomp._vcomp_atomic_div_ui4
@ cdecl _vcomp_atomic_div_ui8(ptr int64) vcomp._vcomp_atomic_div_ui8 @ cdecl _vcomp_atomic_div_ui8(ptr int64) vcomp._vcomp_atomic_div_ui8
@ stub _vcomp_atomic_mul_i1 @ cdecl _vcomp_atomic_mul_i1(ptr long) vcomp._vcomp_atomic_mul_i1
@ cdecl _vcomp_atomic_mul_i2(ptr long) vcomp._vcomp_atomic_mul_i2 @ cdecl _vcomp_atomic_mul_i2(ptr long) vcomp._vcomp_atomic_mul_i2
@ cdecl _vcomp_atomic_mul_i4(ptr long) vcomp._vcomp_atomic_mul_i4 @ cdecl _vcomp_atomic_mul_i4(ptr long) vcomp._vcomp_atomic_mul_i4
@ cdecl _vcomp_atomic_mul_i8(ptr int64) vcomp._vcomp_atomic_mul_i8 @ cdecl _vcomp_atomic_mul_i8(ptr int64) vcomp._vcomp_atomic_mul_i8
@ cdecl _vcomp_atomic_mul_r4(ptr float) vcomp._vcomp_atomic_mul_r4 @ cdecl _vcomp_atomic_mul_r4(ptr float) vcomp._vcomp_atomic_mul_r4
@ cdecl _vcomp_atomic_mul_r8(ptr double) vcomp._vcomp_atomic_mul_r8 @ cdecl _vcomp_atomic_mul_r8(ptr double) vcomp._vcomp_atomic_mul_r8
@ stub _vcomp_atomic_or_i1 @ cdecl _vcomp_atomic_or_i1(ptr long) vcomp._vcomp_atomic_or_i1
@ cdecl _vcomp_atomic_or_i2(ptr long) vcomp._vcomp_atomic_or_i2 @ cdecl _vcomp_atomic_or_i2(ptr long) vcomp._vcomp_atomic_or_i2
@ cdecl _vcomp_atomic_or_i4(ptr long) vcomp._vcomp_atomic_or_i4 @ cdecl _vcomp_atomic_or_i4(ptr long) vcomp._vcomp_atomic_or_i4
@ cdecl _vcomp_atomic_or_i8(ptr int64) vcomp._vcomp_atomic_or_i8 @ cdecl _vcomp_atomic_or_i8(ptr int64) vcomp._vcomp_atomic_or_i8
@ stub _vcomp_atomic_shl_i1 @ cdecl _vcomp_atomic_shl_i1(ptr long) vcomp._vcomp_atomic_shl_i1
@ cdecl _vcomp_atomic_shl_i2(ptr long) vcomp._vcomp_atomic_shl_i2 @ cdecl _vcomp_atomic_shl_i2(ptr long) vcomp._vcomp_atomic_shl_i2
@ cdecl _vcomp_atomic_shl_i4(ptr long) vcomp._vcomp_atomic_shl_i4 @ cdecl _vcomp_atomic_shl_i4(ptr long) vcomp._vcomp_atomic_shl_i4
@ cdecl _vcomp_atomic_shl_i8(ptr long) vcomp._vcomp_atomic_shl_i8 @ cdecl _vcomp_atomic_shl_i8(ptr long) vcomp._vcomp_atomic_shl_i8
@ stub _vcomp_atomic_shr_i1 @ cdecl _vcomp_atomic_shr_i1(ptr long) vcomp._vcomp_atomic_shr_i1
@ cdecl _vcomp_atomic_shr_i2(ptr long) vcomp._vcomp_atomic_shr_i2 @ cdecl _vcomp_atomic_shr_i2(ptr long) vcomp._vcomp_atomic_shr_i2
@ cdecl _vcomp_atomic_shr_i4(ptr long) vcomp._vcomp_atomic_shr_i4 @ cdecl _vcomp_atomic_shr_i4(ptr long) vcomp._vcomp_atomic_shr_i4
@ cdecl _vcomp_atomic_shr_i8(ptr long) vcomp._vcomp_atomic_shr_i8 @ cdecl _vcomp_atomic_shr_i8(ptr long) vcomp._vcomp_atomic_shr_i8
@ stub _vcomp_atomic_shr_ui1 @ cdecl _vcomp_atomic_shr_ui1(ptr long) vcomp._vcomp_atomic_shr_ui1
@ cdecl _vcomp_atomic_shr_ui2(ptr long) vcomp._vcomp_atomic_shr_ui2 @ cdecl _vcomp_atomic_shr_ui2(ptr long) vcomp._vcomp_atomic_shr_ui2
@ cdecl _vcomp_atomic_shr_ui4(ptr long) vcomp._vcomp_atomic_shr_ui4 @ cdecl _vcomp_atomic_shr_ui4(ptr long) vcomp._vcomp_atomic_shr_ui4
@ cdecl _vcomp_atomic_shr_ui8(ptr long) vcomp._vcomp_atomic_shr_ui8 @ cdecl _vcomp_atomic_shr_ui8(ptr long) vcomp._vcomp_atomic_shr_ui8
@ stub _vcomp_atomic_sub_i1 @ cdecl _vcomp_atomic_sub_i1(ptr long) vcomp._vcomp_atomic_sub_i1
@ cdecl _vcomp_atomic_sub_i2(ptr long) vcomp._vcomp_atomic_sub_i2 @ cdecl _vcomp_atomic_sub_i2(ptr long) vcomp._vcomp_atomic_sub_i2
@ cdecl _vcomp_atomic_sub_i4(ptr long) vcomp._vcomp_atomic_sub_i4 @ cdecl _vcomp_atomic_sub_i4(ptr long) vcomp._vcomp_atomic_sub_i4
@ cdecl _vcomp_atomic_sub_i8(ptr int64) vcomp._vcomp_atomic_sub_i8 @ cdecl _vcomp_atomic_sub_i8(ptr int64) vcomp._vcomp_atomic_sub_i8
@ cdecl _vcomp_atomic_sub_r4(ptr float) vcomp._vcomp_atomic_sub_r4 @ cdecl _vcomp_atomic_sub_r4(ptr float) vcomp._vcomp_atomic_sub_r4
@ cdecl _vcomp_atomic_sub_r8(ptr double) vcomp._vcomp_atomic_sub_r8 @ cdecl _vcomp_atomic_sub_r8(ptr double) vcomp._vcomp_atomic_sub_r8
@ stub _vcomp_atomic_xor_i1 @ cdecl _vcomp_atomic_xor_i1(ptr long) vcomp._vcomp_atomic_xor_i1
@ cdecl _vcomp_atomic_xor_i2(ptr long) vcomp._vcomp_atomic_xor_i2 @ cdecl _vcomp_atomic_xor_i2(ptr long) vcomp._vcomp_atomic_xor_i2
@ cdecl _vcomp_atomic_xor_i4(ptr long) vcomp._vcomp_atomic_xor_i4 @ cdecl _vcomp_atomic_xor_i4(ptr long) vcomp._vcomp_atomic_xor_i4
@ cdecl _vcomp_atomic_xor_i8(ptr int64) vcomp._vcomp_atomic_xor_i8 @ cdecl _vcomp_atomic_xor_i8(ptr int64) vcomp._vcomp_atomic_xor_i8
......
@ stub C2VectParallel @ stub C2VectParallel
@ stub _vcomp_atomic_add_i1 @ cdecl _vcomp_atomic_add_i1(ptr long) vcomp._vcomp_atomic_add_i1
@ cdecl _vcomp_atomic_add_i2(ptr long) vcomp._vcomp_atomic_add_i2 @ cdecl _vcomp_atomic_add_i2(ptr long) vcomp._vcomp_atomic_add_i2
@ cdecl _vcomp_atomic_add_i4(ptr long) vcomp._vcomp_atomic_add_i4 @ cdecl _vcomp_atomic_add_i4(ptr long) vcomp._vcomp_atomic_add_i4
@ cdecl _vcomp_atomic_add_i8(ptr int64) vcomp._vcomp_atomic_add_i8 @ cdecl _vcomp_atomic_add_i8(ptr int64) vcomp._vcomp_atomic_add_i8
@ cdecl _vcomp_atomic_add_r4(ptr float) vcomp._vcomp_atomic_add_r4 @ cdecl _vcomp_atomic_add_r4(ptr float) vcomp._vcomp_atomic_add_r4
@ cdecl _vcomp_atomic_add_r8(ptr double) vcomp._vcomp_atomic_add_r8 @ cdecl _vcomp_atomic_add_r8(ptr double) vcomp._vcomp_atomic_add_r8
@ stub _vcomp_atomic_and_i1 @ cdecl _vcomp_atomic_and_i1(ptr long) vcomp._vcomp_atomic_and_i1
@ cdecl _vcomp_atomic_and_i2(ptr long) vcomp._vcomp_atomic_and_i2 @ cdecl _vcomp_atomic_and_i2(ptr long) vcomp._vcomp_atomic_and_i2
@ cdecl _vcomp_atomic_and_i4(ptr long) vcomp._vcomp_atomic_and_i4 @ cdecl _vcomp_atomic_and_i4(ptr long) vcomp._vcomp_atomic_and_i4
@ cdecl _vcomp_atomic_and_i8(ptr int64) vcomp._vcomp_atomic_and_i8 @ cdecl _vcomp_atomic_and_i8(ptr int64) vcomp._vcomp_atomic_and_i8
@ stub _vcomp_atomic_div_i1 @ cdecl _vcomp_atomic_div_i1(ptr long) vcomp._vcomp_atomic_div_i1
@ cdecl _vcomp_atomic_div_i2(ptr long) vcomp._vcomp_atomic_div_i2 @ cdecl _vcomp_atomic_div_i2(ptr long) vcomp._vcomp_atomic_div_i2
@ cdecl _vcomp_atomic_div_i4(ptr long) vcomp._vcomp_atomic_div_i4 @ cdecl _vcomp_atomic_div_i4(ptr long) vcomp._vcomp_atomic_div_i4
@ cdecl _vcomp_atomic_div_i8(ptr int64) vcomp._vcomp_atomic_div_i8 @ cdecl _vcomp_atomic_div_i8(ptr int64) vcomp._vcomp_atomic_div_i8
@ cdecl _vcomp_atomic_div_r4(ptr float) vcomp._vcomp_atomic_div_r4 @ cdecl _vcomp_atomic_div_r4(ptr float) vcomp._vcomp_atomic_div_r4
@ cdecl _vcomp_atomic_div_r8(ptr double) vcomp._vcomp_atomic_div_r8 @ cdecl _vcomp_atomic_div_r8(ptr double) vcomp._vcomp_atomic_div_r8
@ stub _vcomp_atomic_div_ui1 @ cdecl _vcomp_atomic_div_ui1(ptr long) vcomp._vcomp_atomic_div_ui1
@ cdecl _vcomp_atomic_div_ui2(ptr long) vcomp._vcomp_atomic_div_ui2 @ cdecl _vcomp_atomic_div_ui2(ptr long) vcomp._vcomp_atomic_div_ui2
@ cdecl _vcomp_atomic_div_ui4(ptr long) vcomp._vcomp_atomic_div_ui4 @ cdecl _vcomp_atomic_div_ui4(ptr long) vcomp._vcomp_atomic_div_ui4
@ cdecl _vcomp_atomic_div_ui8(ptr int64) vcomp._vcomp_atomic_div_ui8 @ cdecl _vcomp_atomic_div_ui8(ptr int64) vcomp._vcomp_atomic_div_ui8
@ stub _vcomp_atomic_mul_i1 @ cdecl _vcomp_atomic_mul_i1(ptr long) vcomp._vcomp_atomic_mul_i1
@ cdecl _vcomp_atomic_mul_i2(ptr long) vcomp._vcomp_atomic_mul_i2 @ cdecl _vcomp_atomic_mul_i2(ptr long) vcomp._vcomp_atomic_mul_i2
@ cdecl _vcomp_atomic_mul_i4(ptr long) vcomp._vcomp_atomic_mul_i4 @ cdecl _vcomp_atomic_mul_i4(ptr long) vcomp._vcomp_atomic_mul_i4
@ cdecl _vcomp_atomic_mul_i8(ptr int64) vcomp._vcomp_atomic_mul_i8 @ cdecl _vcomp_atomic_mul_i8(ptr int64) vcomp._vcomp_atomic_mul_i8
@ cdecl _vcomp_atomic_mul_r4(ptr float) vcomp._vcomp_atomic_mul_r4 @ cdecl _vcomp_atomic_mul_r4(ptr float) vcomp._vcomp_atomic_mul_r4
@ cdecl _vcomp_atomic_mul_r8(ptr double) vcomp._vcomp_atomic_mul_r8 @ cdecl _vcomp_atomic_mul_r8(ptr double) vcomp._vcomp_atomic_mul_r8
@ stub _vcomp_atomic_or_i1 @ cdecl _vcomp_atomic_or_i1(ptr long) vcomp._vcomp_atomic_or_i1
@ cdecl _vcomp_atomic_or_i2(ptr long) vcomp._vcomp_atomic_or_i2 @ cdecl _vcomp_atomic_or_i2(ptr long) vcomp._vcomp_atomic_or_i2
@ cdecl _vcomp_atomic_or_i4(ptr long) vcomp._vcomp_atomic_or_i4 @ cdecl _vcomp_atomic_or_i4(ptr long) vcomp._vcomp_atomic_or_i4
@ cdecl _vcomp_atomic_or_i8(ptr int64) vcomp._vcomp_atomic_or_i8 @ cdecl _vcomp_atomic_or_i8(ptr int64) vcomp._vcomp_atomic_or_i8
@ stub _vcomp_atomic_shl_i1 @ cdecl _vcomp_atomic_shl_i1(ptr long) vcomp._vcomp_atomic_shl_i1
@ cdecl _vcomp_atomic_shl_i2(ptr long) vcomp._vcomp_atomic_shl_i2 @ cdecl _vcomp_atomic_shl_i2(ptr long) vcomp._vcomp_atomic_shl_i2
@ cdecl _vcomp_atomic_shl_i4(ptr long) vcomp._vcomp_atomic_shl_i4 @ cdecl _vcomp_atomic_shl_i4(ptr long) vcomp._vcomp_atomic_shl_i4
@ cdecl _vcomp_atomic_shl_i8(ptr long) vcomp._vcomp_atomic_shl_i8 @ cdecl _vcomp_atomic_shl_i8(ptr long) vcomp._vcomp_atomic_shl_i8
@ stub _vcomp_atomic_shr_i1 @ cdecl _vcomp_atomic_shr_i1(ptr long) vcomp._vcomp_atomic_shr_i1
@ cdecl _vcomp_atomic_shr_i2(ptr long) vcomp._vcomp_atomic_shr_i2 @ cdecl _vcomp_atomic_shr_i2(ptr long) vcomp._vcomp_atomic_shr_i2
@ cdecl _vcomp_atomic_shr_i4(ptr long) vcomp._vcomp_atomic_shr_i4 @ cdecl _vcomp_atomic_shr_i4(ptr long) vcomp._vcomp_atomic_shr_i4
@ cdecl _vcomp_atomic_shr_i8(ptr long) vcomp._vcomp_atomic_shr_i8 @ cdecl _vcomp_atomic_shr_i8(ptr long) vcomp._vcomp_atomic_shr_i8
@ stub _vcomp_atomic_shr_ui1 @ cdecl _vcomp_atomic_shr_ui1(ptr long) vcomp._vcomp_atomic_shr_ui1
@ cdecl _vcomp_atomic_shr_ui2(ptr long) vcomp._vcomp_atomic_shr_ui2 @ cdecl _vcomp_atomic_shr_ui2(ptr long) vcomp._vcomp_atomic_shr_ui2
@ cdecl _vcomp_atomic_shr_ui4(ptr long) vcomp._vcomp_atomic_shr_ui4 @ cdecl _vcomp_atomic_shr_ui4(ptr long) vcomp._vcomp_atomic_shr_ui4
@ cdecl _vcomp_atomic_shr_ui8(ptr long) vcomp._vcomp_atomic_shr_ui8 @ cdecl _vcomp_atomic_shr_ui8(ptr long) vcomp._vcomp_atomic_shr_ui8
@ stub _vcomp_atomic_sub_i1 @ cdecl _vcomp_atomic_sub_i1(ptr long) vcomp._vcomp_atomic_sub_i1
@ cdecl _vcomp_atomic_sub_i2(ptr long) vcomp._vcomp_atomic_sub_i2 @ cdecl _vcomp_atomic_sub_i2(ptr long) vcomp._vcomp_atomic_sub_i2
@ cdecl _vcomp_atomic_sub_i4(ptr long) vcomp._vcomp_atomic_sub_i4 @ cdecl _vcomp_atomic_sub_i4(ptr long) vcomp._vcomp_atomic_sub_i4
@ cdecl _vcomp_atomic_sub_i8(ptr int64) vcomp._vcomp_atomic_sub_i8 @ cdecl _vcomp_atomic_sub_i8(ptr int64) vcomp._vcomp_atomic_sub_i8
@ cdecl _vcomp_atomic_sub_r4(ptr float) vcomp._vcomp_atomic_sub_r4 @ cdecl _vcomp_atomic_sub_r4(ptr float) vcomp._vcomp_atomic_sub_r4
@ cdecl _vcomp_atomic_sub_r8(ptr double) vcomp._vcomp_atomic_sub_r8 @ cdecl _vcomp_atomic_sub_r8(ptr double) vcomp._vcomp_atomic_sub_r8
@ stub _vcomp_atomic_xor_i1 @ cdecl _vcomp_atomic_xor_i1(ptr long) vcomp._vcomp_atomic_xor_i1
@ cdecl _vcomp_atomic_xor_i2(ptr long) vcomp._vcomp_atomic_xor_i2 @ cdecl _vcomp_atomic_xor_i2(ptr long) vcomp._vcomp_atomic_xor_i2
@ cdecl _vcomp_atomic_xor_i4(ptr long) vcomp._vcomp_atomic_xor_i4 @ cdecl _vcomp_atomic_xor_i4(ptr long) vcomp._vcomp_atomic_xor_i4
@ cdecl _vcomp_atomic_xor_i8(ptr int64) vcomp._vcomp_atomic_xor_i8 @ cdecl _vcomp_atomic_xor_i8(ptr int64) vcomp._vcomp_atomic_xor_i8
......
@ stub C2VectParallel @ stub C2VectParallel
@ stub _vcomp_atomic_add_i1 @ cdecl _vcomp_atomic_add_i1(ptr long) vcomp._vcomp_atomic_add_i1
@ cdecl _vcomp_atomic_add_i2(ptr long) vcomp._vcomp_atomic_add_i2 @ cdecl _vcomp_atomic_add_i2(ptr long) vcomp._vcomp_atomic_add_i2
@ cdecl _vcomp_atomic_add_i4(ptr long) vcomp._vcomp_atomic_add_i4 @ cdecl _vcomp_atomic_add_i4(ptr long) vcomp._vcomp_atomic_add_i4
@ cdecl _vcomp_atomic_add_i8(ptr int64) vcomp._vcomp_atomic_add_i8 @ cdecl _vcomp_atomic_add_i8(ptr int64) vcomp._vcomp_atomic_add_i8
@ cdecl _vcomp_atomic_add_r4(ptr float) vcomp._vcomp_atomic_add_r4 @ cdecl _vcomp_atomic_add_r4(ptr float) vcomp._vcomp_atomic_add_r4
@ cdecl _vcomp_atomic_add_r8(ptr double) vcomp._vcomp_atomic_add_r8 @ cdecl _vcomp_atomic_add_r8(ptr double) vcomp._vcomp_atomic_add_r8
@ stub _vcomp_atomic_and_i1 @ cdecl _vcomp_atomic_and_i1(ptr long) vcomp._vcomp_atomic_and_i1
@ cdecl _vcomp_atomic_and_i2(ptr long) vcomp._vcomp_atomic_and_i2 @ cdecl _vcomp_atomic_and_i2(ptr long) vcomp._vcomp_atomic_and_i2
@ cdecl _vcomp_atomic_and_i4(ptr long) vcomp._vcomp_atomic_and_i4 @ cdecl _vcomp_atomic_and_i4(ptr long) vcomp._vcomp_atomic_and_i4
@ cdecl _vcomp_atomic_and_i8(ptr int64) vcomp._vcomp_atomic_and_i8 @ cdecl _vcomp_atomic_and_i8(ptr int64) vcomp._vcomp_atomic_and_i8
@ stub _vcomp_atomic_div_i1 @ cdecl _vcomp_atomic_div_i1(ptr long) vcomp._vcomp_atomic_div_i1
@ cdecl _vcomp_atomic_div_i2(ptr long) vcomp._vcomp_atomic_div_i2 @ cdecl _vcomp_atomic_div_i2(ptr long) vcomp._vcomp_atomic_div_i2
@ cdecl _vcomp_atomic_div_i4(ptr long) vcomp._vcomp_atomic_div_i4 @ cdecl _vcomp_atomic_div_i4(ptr long) vcomp._vcomp_atomic_div_i4
@ cdecl _vcomp_atomic_div_i8(ptr int64) vcomp._vcomp_atomic_div_i8 @ cdecl _vcomp_atomic_div_i8(ptr int64) vcomp._vcomp_atomic_div_i8
@ cdecl _vcomp_atomic_div_r4(ptr float) vcomp._vcomp_atomic_div_r4 @ cdecl _vcomp_atomic_div_r4(ptr float) vcomp._vcomp_atomic_div_r4
@ cdecl _vcomp_atomic_div_r8(ptr double) vcomp._vcomp_atomic_div_r8 @ cdecl _vcomp_atomic_div_r8(ptr double) vcomp._vcomp_atomic_div_r8
@ stub _vcomp_atomic_div_ui1 @ cdecl _vcomp_atomic_div_ui1(ptr long) vcomp._vcomp_atomic_div_ui1
@ cdecl _vcomp_atomic_div_ui2(ptr long) vcomp._vcomp_atomic_div_ui2 @ cdecl _vcomp_atomic_div_ui2(ptr long) vcomp._vcomp_atomic_div_ui2
@ cdecl _vcomp_atomic_div_ui4(ptr long) vcomp._vcomp_atomic_div_ui4 @ cdecl _vcomp_atomic_div_ui4(ptr long) vcomp._vcomp_atomic_div_ui4
@ cdecl _vcomp_atomic_div_ui8(ptr int64) vcomp._vcomp_atomic_div_ui8 @ cdecl _vcomp_atomic_div_ui8(ptr int64) vcomp._vcomp_atomic_div_ui8
@ stub _vcomp_atomic_mul_i1 @ cdecl _vcomp_atomic_mul_i1(ptr long) vcomp._vcomp_atomic_mul_i1
@ cdecl _vcomp_atomic_mul_i2(ptr long) vcomp._vcomp_atomic_mul_i2 @ cdecl _vcomp_atomic_mul_i2(ptr long) vcomp._vcomp_atomic_mul_i2
@ cdecl _vcomp_atomic_mul_i4(ptr long) vcomp._vcomp_atomic_mul_i4 @ cdecl _vcomp_atomic_mul_i4(ptr long) vcomp._vcomp_atomic_mul_i4
@ cdecl _vcomp_atomic_mul_i8(ptr int64) vcomp._vcomp_atomic_mul_i8 @ cdecl _vcomp_atomic_mul_i8(ptr int64) vcomp._vcomp_atomic_mul_i8
@ cdecl _vcomp_atomic_mul_r4(ptr float) vcomp._vcomp_atomic_mul_r4 @ cdecl _vcomp_atomic_mul_r4(ptr float) vcomp._vcomp_atomic_mul_r4
@ cdecl _vcomp_atomic_mul_r8(ptr double) vcomp._vcomp_atomic_mul_r8 @ cdecl _vcomp_atomic_mul_r8(ptr double) vcomp._vcomp_atomic_mul_r8
@ stub _vcomp_atomic_or_i1 @ cdecl _vcomp_atomic_or_i1(ptr long) vcomp._vcomp_atomic_or_i1
@ cdecl _vcomp_atomic_or_i2(ptr long) vcomp._vcomp_atomic_or_i2 @ cdecl _vcomp_atomic_or_i2(ptr long) vcomp._vcomp_atomic_or_i2
@ cdecl _vcomp_atomic_or_i4(ptr long) vcomp._vcomp_atomic_or_i4 @ cdecl _vcomp_atomic_or_i4(ptr long) vcomp._vcomp_atomic_or_i4
@ cdecl _vcomp_atomic_or_i8(ptr int64) vcomp._vcomp_atomic_or_i8 @ cdecl _vcomp_atomic_or_i8(ptr int64) vcomp._vcomp_atomic_or_i8
@ stub _vcomp_atomic_shl_i1 @ cdecl _vcomp_atomic_shl_i1(ptr long) vcomp._vcomp_atomic_shl_i1
@ cdecl _vcomp_atomic_shl_i2(ptr long) vcomp._vcomp_atomic_shl_i2 @ cdecl _vcomp_atomic_shl_i2(ptr long) vcomp._vcomp_atomic_shl_i2
@ cdecl _vcomp_atomic_shl_i4(ptr long) vcomp._vcomp_atomic_shl_i4 @ cdecl _vcomp_atomic_shl_i4(ptr long) vcomp._vcomp_atomic_shl_i4
@ cdecl _vcomp_atomic_shl_i8(ptr long) vcomp._vcomp_atomic_shl_i8 @ cdecl _vcomp_atomic_shl_i8(ptr long) vcomp._vcomp_atomic_shl_i8
@ stub _vcomp_atomic_shr_i1 @ cdecl _vcomp_atomic_shr_i1(ptr long) vcomp._vcomp_atomic_shr_i1
@ cdecl _vcomp_atomic_shr_i2(ptr long) vcomp._vcomp_atomic_shr_i2 @ cdecl _vcomp_atomic_shr_i2(ptr long) vcomp._vcomp_atomic_shr_i2
@ cdecl _vcomp_atomic_shr_i4(ptr long) vcomp._vcomp_atomic_shr_i4 @ cdecl _vcomp_atomic_shr_i4(ptr long) vcomp._vcomp_atomic_shr_i4
@ cdecl _vcomp_atomic_shr_i8(ptr long) vcomp._vcomp_atomic_shr_i8 @ cdecl _vcomp_atomic_shr_i8(ptr long) vcomp._vcomp_atomic_shr_i8
@ stub _vcomp_atomic_shr_ui1 @ cdecl _vcomp_atomic_shr_ui1(ptr long) vcomp._vcomp_atomic_shr_ui1
@ cdecl _vcomp_atomic_shr_ui2(ptr long) vcomp._vcomp_atomic_shr_ui2 @ cdecl _vcomp_atomic_shr_ui2(ptr long) vcomp._vcomp_atomic_shr_ui2
@ cdecl _vcomp_atomic_shr_ui4(ptr long) vcomp._vcomp_atomic_shr_ui4 @ cdecl _vcomp_atomic_shr_ui4(ptr long) vcomp._vcomp_atomic_shr_ui4
@ cdecl _vcomp_atomic_shr_ui8(ptr long) vcomp._vcomp_atomic_shr_ui8 @ cdecl _vcomp_atomic_shr_ui8(ptr long) vcomp._vcomp_atomic_shr_ui8
@ stub _vcomp_atomic_sub_i1 @ cdecl _vcomp_atomic_sub_i1(ptr long) vcomp._vcomp_atomic_sub_i1
@ cdecl _vcomp_atomic_sub_i2(ptr long) vcomp._vcomp_atomic_sub_i2 @ cdecl _vcomp_atomic_sub_i2(ptr long) vcomp._vcomp_atomic_sub_i2
@ cdecl _vcomp_atomic_sub_i4(ptr long) vcomp._vcomp_atomic_sub_i4 @ cdecl _vcomp_atomic_sub_i4(ptr long) vcomp._vcomp_atomic_sub_i4
@ cdecl _vcomp_atomic_sub_i8(ptr int64) vcomp._vcomp_atomic_sub_i8 @ cdecl _vcomp_atomic_sub_i8(ptr int64) vcomp._vcomp_atomic_sub_i8
@ cdecl _vcomp_atomic_sub_r4(ptr float) vcomp._vcomp_atomic_sub_r4 @ cdecl _vcomp_atomic_sub_r4(ptr float) vcomp._vcomp_atomic_sub_r4
@ cdecl _vcomp_atomic_sub_r8(ptr double) vcomp._vcomp_atomic_sub_r8 @ cdecl _vcomp_atomic_sub_r8(ptr double) vcomp._vcomp_atomic_sub_r8
@ stub _vcomp_atomic_xor_i1 @ cdecl _vcomp_atomic_xor_i1(ptr long) vcomp._vcomp_atomic_xor_i1
@ cdecl _vcomp_atomic_xor_i2(ptr long) vcomp._vcomp_atomic_xor_i2 @ cdecl _vcomp_atomic_xor_i2(ptr long) vcomp._vcomp_atomic_xor_i2
@ cdecl _vcomp_atomic_xor_i4(ptr long) vcomp._vcomp_atomic_xor_i4 @ cdecl _vcomp_atomic_xor_i4(ptr long) vcomp._vcomp_atomic_xor_i4
@ cdecl _vcomp_atomic_xor_i8(ptr int64) vcomp._vcomp_atomic_xor_i8 @ cdecl _vcomp_atomic_xor_i8(ptr int64) vcomp._vcomp_atomic_xor_i8
......
@ stub _vcomp_atomic_add_i1 @ cdecl _vcomp_atomic_add_i1(ptr long) vcomp._vcomp_atomic_add_i1
@ cdecl _vcomp_atomic_add_i2(ptr long) vcomp._vcomp_atomic_add_i2 @ cdecl _vcomp_atomic_add_i2(ptr long) vcomp._vcomp_atomic_add_i2
@ cdecl _vcomp_atomic_add_i4(ptr long) vcomp._vcomp_atomic_add_i4 @ cdecl _vcomp_atomic_add_i4(ptr long) vcomp._vcomp_atomic_add_i4
@ cdecl _vcomp_atomic_add_i8(ptr int64) vcomp._vcomp_atomic_add_i8 @ cdecl _vcomp_atomic_add_i8(ptr int64) vcomp._vcomp_atomic_add_i8
@ cdecl _vcomp_atomic_add_r4(ptr float) vcomp._vcomp_atomic_add_r4 @ cdecl _vcomp_atomic_add_r4(ptr float) vcomp._vcomp_atomic_add_r4
@ cdecl _vcomp_atomic_add_r8(ptr double) vcomp._vcomp_atomic_add_r8 @ cdecl _vcomp_atomic_add_r8(ptr double) vcomp._vcomp_atomic_add_r8
@ stub _vcomp_atomic_and_i1 @ cdecl _vcomp_atomic_and_i1(ptr long) vcomp._vcomp_atomic_and_i1
@ cdecl _vcomp_atomic_and_i2(ptr long) vcomp._vcomp_atomic_and_i2 @ cdecl _vcomp_atomic_and_i2(ptr long) vcomp._vcomp_atomic_and_i2
@ cdecl _vcomp_atomic_and_i4(ptr long) vcomp._vcomp_atomic_and_i4 @ cdecl _vcomp_atomic_and_i4(ptr long) vcomp._vcomp_atomic_and_i4
@ cdecl _vcomp_atomic_and_i8(ptr int64) vcomp._vcomp_atomic_and_i8 @ cdecl _vcomp_atomic_and_i8(ptr int64) vcomp._vcomp_atomic_and_i8
@ stub _vcomp_atomic_div_i1 @ cdecl _vcomp_atomic_div_i1(ptr long) vcomp._vcomp_atomic_div_i1
@ cdecl _vcomp_atomic_div_i2(ptr long) vcomp._vcomp_atomic_div_i2 @ cdecl _vcomp_atomic_div_i2(ptr long) vcomp._vcomp_atomic_div_i2
@ cdecl _vcomp_atomic_div_i4(ptr long) vcomp._vcomp_atomic_div_i4 @ cdecl _vcomp_atomic_div_i4(ptr long) vcomp._vcomp_atomic_div_i4
@ cdecl _vcomp_atomic_div_i8(ptr int64) vcomp._vcomp_atomic_div_i8 @ cdecl _vcomp_atomic_div_i8(ptr int64) vcomp._vcomp_atomic_div_i8
@ cdecl _vcomp_atomic_div_r4(ptr float) vcomp._vcomp_atomic_div_r4 @ cdecl _vcomp_atomic_div_r4(ptr float) vcomp._vcomp_atomic_div_r4
@ cdecl _vcomp_atomic_div_r8(ptr double) vcomp._vcomp_atomic_div_r8 @ cdecl _vcomp_atomic_div_r8(ptr double) vcomp._vcomp_atomic_div_r8
@ stub _vcomp_atomic_div_ui1 @ cdecl _vcomp_atomic_div_ui1(ptr long) vcomp._vcomp_atomic_div_ui1
@ cdecl _vcomp_atomic_div_ui2(ptr long) vcomp._vcomp_atomic_div_ui2 @ cdecl _vcomp_atomic_div_ui2(ptr long) vcomp._vcomp_atomic_div_ui2
@ cdecl _vcomp_atomic_div_ui4(ptr long) vcomp._vcomp_atomic_div_ui4 @ cdecl _vcomp_atomic_div_ui4(ptr long) vcomp._vcomp_atomic_div_ui4
@ cdecl _vcomp_atomic_div_ui8(ptr int64) vcomp._vcomp_atomic_div_ui8 @ cdecl _vcomp_atomic_div_ui8(ptr int64) vcomp._vcomp_atomic_div_ui8
@ stub _vcomp_atomic_mul_i1 @ cdecl _vcomp_atomic_mul_i1(ptr long) vcomp._vcomp_atomic_mul_i1
@ cdecl _vcomp_atomic_mul_i2(ptr long) vcomp._vcomp_atomic_mul_i2 @ cdecl _vcomp_atomic_mul_i2(ptr long) vcomp._vcomp_atomic_mul_i2
@ cdecl _vcomp_atomic_mul_i4(ptr long) vcomp._vcomp_atomic_mul_i4 @ cdecl _vcomp_atomic_mul_i4(ptr long) vcomp._vcomp_atomic_mul_i4
@ cdecl _vcomp_atomic_mul_i8(ptr int64) vcomp._vcomp_atomic_mul_i8 @ cdecl _vcomp_atomic_mul_i8(ptr int64) vcomp._vcomp_atomic_mul_i8
@ cdecl _vcomp_atomic_mul_r4(ptr float) vcomp._vcomp_atomic_mul_r4 @ cdecl _vcomp_atomic_mul_r4(ptr float) vcomp._vcomp_atomic_mul_r4
@ cdecl _vcomp_atomic_mul_r8(ptr double) vcomp._vcomp_atomic_mul_r8 @ cdecl _vcomp_atomic_mul_r8(ptr double) vcomp._vcomp_atomic_mul_r8
@ stub _vcomp_atomic_or_i1 @ cdecl _vcomp_atomic_or_i1(ptr long) vcomp._vcomp_atomic_or_i1
@ cdecl _vcomp_atomic_or_i2(ptr long) vcomp._vcomp_atomic_or_i2 @ cdecl _vcomp_atomic_or_i2(ptr long) vcomp._vcomp_atomic_or_i2
@ cdecl _vcomp_atomic_or_i4(ptr long) vcomp._vcomp_atomic_or_i4 @ cdecl _vcomp_atomic_or_i4(ptr long) vcomp._vcomp_atomic_or_i4
@ cdecl _vcomp_atomic_or_i8(ptr int64) vcomp._vcomp_atomic_or_i8 @ cdecl _vcomp_atomic_or_i8(ptr int64) vcomp._vcomp_atomic_or_i8
@ stub _vcomp_atomic_shl_i1 @ cdecl _vcomp_atomic_shl_i1(ptr long) vcomp._vcomp_atomic_shl_i1
@ cdecl _vcomp_atomic_shl_i2(ptr long) vcomp._vcomp_atomic_shl_i2 @ cdecl _vcomp_atomic_shl_i2(ptr long) vcomp._vcomp_atomic_shl_i2
@ cdecl _vcomp_atomic_shl_i4(ptr long) vcomp._vcomp_atomic_shl_i4 @ cdecl _vcomp_atomic_shl_i4(ptr long) vcomp._vcomp_atomic_shl_i4
@ cdecl _vcomp_atomic_shl_i8(ptr long) vcomp._vcomp_atomic_shl_i8 @ cdecl _vcomp_atomic_shl_i8(ptr long) vcomp._vcomp_atomic_shl_i8
@ stub _vcomp_atomic_shr_i1 @ cdecl _vcomp_atomic_shr_i1(ptr long) vcomp._vcomp_atomic_shr_i1
@ cdecl _vcomp_atomic_shr_i2(ptr long) vcomp._vcomp_atomic_shr_i2 @ cdecl _vcomp_atomic_shr_i2(ptr long) vcomp._vcomp_atomic_shr_i2
@ cdecl _vcomp_atomic_shr_i4(ptr long) vcomp._vcomp_atomic_shr_i4 @ cdecl _vcomp_atomic_shr_i4(ptr long) vcomp._vcomp_atomic_shr_i4
@ cdecl _vcomp_atomic_shr_i8(ptr long) vcomp._vcomp_atomic_shr_i8 @ cdecl _vcomp_atomic_shr_i8(ptr long) vcomp._vcomp_atomic_shr_i8
@ stub _vcomp_atomic_shr_ui1 @ cdecl _vcomp_atomic_shr_ui1(ptr long) vcomp._vcomp_atomic_shr_ui1
@ cdecl _vcomp_atomic_shr_ui2(ptr long) vcomp._vcomp_atomic_shr_ui2 @ cdecl _vcomp_atomic_shr_ui2(ptr long) vcomp._vcomp_atomic_shr_ui2
@ cdecl _vcomp_atomic_shr_ui4(ptr long) vcomp._vcomp_atomic_shr_ui4 @ cdecl _vcomp_atomic_shr_ui4(ptr long) vcomp._vcomp_atomic_shr_ui4
@ cdecl _vcomp_atomic_shr_ui8(ptr long) vcomp._vcomp_atomic_shr_ui8 @ cdecl _vcomp_atomic_shr_ui8(ptr long) vcomp._vcomp_atomic_shr_ui8
@ stub _vcomp_atomic_sub_i1 @ cdecl _vcomp_atomic_sub_i1(ptr long) vcomp._vcomp_atomic_sub_i1
@ cdecl _vcomp_atomic_sub_i2(ptr long) vcomp._vcomp_atomic_sub_i2 @ cdecl _vcomp_atomic_sub_i2(ptr long) vcomp._vcomp_atomic_sub_i2
@ cdecl _vcomp_atomic_sub_i4(ptr long) vcomp._vcomp_atomic_sub_i4 @ cdecl _vcomp_atomic_sub_i4(ptr long) vcomp._vcomp_atomic_sub_i4
@ cdecl _vcomp_atomic_sub_i8(ptr int64) vcomp._vcomp_atomic_sub_i8 @ cdecl _vcomp_atomic_sub_i8(ptr int64) vcomp._vcomp_atomic_sub_i8
@ cdecl _vcomp_atomic_sub_r4(ptr float) vcomp._vcomp_atomic_sub_r4 @ cdecl _vcomp_atomic_sub_r4(ptr float) vcomp._vcomp_atomic_sub_r4
@ cdecl _vcomp_atomic_sub_r8(ptr double) vcomp._vcomp_atomic_sub_r8 @ cdecl _vcomp_atomic_sub_r8(ptr double) vcomp._vcomp_atomic_sub_r8
@ stub _vcomp_atomic_xor_i1 @ cdecl _vcomp_atomic_xor_i1(ptr long) vcomp._vcomp_atomic_xor_i1
@ cdecl _vcomp_atomic_xor_i2(ptr long) vcomp._vcomp_atomic_xor_i2 @ cdecl _vcomp_atomic_xor_i2(ptr long) vcomp._vcomp_atomic_xor_i2
@ cdecl _vcomp_atomic_xor_i4(ptr long) vcomp._vcomp_atomic_xor_i4 @ cdecl _vcomp_atomic_xor_i4(ptr long) vcomp._vcomp_atomic_xor_i4
@ cdecl _vcomp_atomic_xor_i8(ptr int64) vcomp._vcomp_atomic_xor_i8 @ cdecl _vcomp_atomic_xor_i8(ptr int64) vcomp._vcomp_atomic_xor_i8
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment