Changeset 49724 in vbox for trunk/include
- Timestamp:
- Nov 29, 2013 1:28:54 PM (11 years ago)
- svn:sync-xref-src-repo-rev:
- 91020
- Location:
- trunk/include/iprt
- Files:
-
- 2 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/include/iprt/asm.h
r44528 r49724 2993 2993 ASMAtomicOrU64((uint64_t volatile *)pi64, i64); 2994 2994 } 2995 2996 2995 2997 /** 2996 2998 * Atomically And an unsigned 32-bit value, ordered. … … 3083 3085 { 3084 3086 ASMAtomicAndU64((uint64_t volatile *)pi64, (uint64_t)i64); 3087 } 3088 3089 3090 /** 3091 * Atomically OR an unsigned 32-bit value, unordered but interrupt safe. 3092 * 3093 * @param pu32 Pointer to the pointer variable to OR u32 with. 3094 * @param u32 The value to OR *pu32 with. 3095 */ 3096 #if RT_INLINE_ASM_EXTERNAL 3097 DECLASM(void) ASMAtomicUoOrU32(uint32_t volatile *pu32, uint32_t u32); 3098 #else 3099 DECLINLINE(void) ASMAtomicUoOrU32(uint32_t volatile *pu32, uint32_t u32) 3100 { 3101 # if RT_INLINE_ASM_GNU_STYLE 3102 __asm__ __volatile__("orl %1, %0\n\t" 3103 : "=m" (*pu32) 3104 : "ir" (u32), 3105 "m" (*pu32)); 3106 # else 3107 __asm 3108 { 3109 mov eax, [u32] 3110 # ifdef RT_ARCH_AMD64 3111 mov rdx, [pu32] 3112 or [rdx], eax 3113 # else 3114 mov edx, [pu32] 3115 or [edx], eax 3116 # endif 3117 } 3118 # endif 3119 } 3120 #endif 3121 3122 3123 /** 3124 * Atomically OR a signed 32-bit value, unordered. 3125 * 3126 * @param pi32 Pointer to the pointer variable to OR u32 with. 3127 * @param i32 The value to OR *pu32 with. 3128 */ 3129 DECLINLINE(void) ASMAtomicUoOrS32(int32_t volatile *pi32, int32_t i32) 3130 { 3131 ASMAtomicUoOrU32((uint32_t volatile *)pi32, i32); 3132 } 3133 3134 3135 /** 3136 * Atomically OR an unsigned 64-bit value, unordered. 3137 * 3138 * @param pu64 Pointer to the pointer variable to OR u64 with. 3139 * @param u64 The value to OR *pu64 with. 
3140 */ 3141 #if RT_INLINE_ASM_EXTERNAL 3142 DECLASM(void) ASMAtomicUoOrU64(uint64_t volatile *pu64, uint64_t u64); 3143 #else 3144 DECLINLINE(void) ASMAtomicUoOrU64(uint64_t volatile *pu64, uint64_t u64) 3145 { 3146 # if RT_INLINE_ASM_GNU_STYLE && defined(RT_ARCH_AMD64) 3147 __asm__ __volatile__("orq %1, %q0\n\t" 3148 : "=m" (*pu64) 3149 : "r" (u64), 3150 "m" (*pu64)); 3151 # else 3152 for (;;) 3153 { 3154 uint64_t u64Old = ASMAtomicUoReadU64(pu64); 3155 uint64_t u64New = u64Old | u64; 3156 if (ASMAtomicCmpXchgU64(pu64, u64New, u64Old)) 3157 break; 3158 ASMNopPause(); 3159 } 3160 # endif 3161 } 3162 #endif 3163 3164 3165 /** 3166 * Atomically Or a signed 64-bit value, unordered. 3167 * 3168 * @param pi64 Pointer to the variable to OR i64 with. 3169 * @param i64 The value to OR *pi64 with. 3170 */ 3171 DECLINLINE(void) ASMAtomicUoOrS64(int64_t volatile *pi64, int64_t i64) 3172 { 3173 ASMAtomicUoOrU64((uint64_t volatile *)pi64, i64); 3174 } 3175 3176 3177 /** 3178 * Atomically And an unsigned 32-bit value, unordered. 3179 * 3180 * @param pu32 Pointer to the variable to AND u32 with. 3181 * @param u32 The value to AND *pu32 with. 3182 */ 3183 #if RT_INLINE_ASM_EXTERNAL 3184 DECLASM(void) ASMAtomicUoAndU32(uint32_t volatile *pu32, uint32_t u32); 3185 #else 3186 DECLINLINE(void) ASMAtomicUoAndU32(uint32_t volatile *pu32, uint32_t u32) 3187 { 3188 # if RT_INLINE_ASM_GNU_STYLE 3189 __asm__ __volatile__("andl %1, %0\n\t" 3190 : "=m" (*pu32) 3191 : "ir" (u32), 3192 "m" (*pu32)); 3193 # else 3194 __asm 3195 { 3196 mov eax, [u32] 3197 # ifdef RT_ARCH_AMD64 3198 mov rdx, [pu32] 3199 and [rdx], eax 3200 # else 3201 mov edx, [pu32] 3202 and [edx], eax 3203 # endif 3204 } 3205 # endif 3206 } 3207 #endif 3208 3209 3210 /** 3211 * Atomically And a signed 32-bit value, unordered. 3212 * 3213 * @param pi32 Pointer to the variable to AND i32 with. 3214 * @param i32 The value to AND *pi32 with. 
3215 */ 3216 DECLINLINE(void) ASMAtomicUoAndS32(int32_t volatile *pi32, int32_t i32) 3217 { 3218 ASMAtomicUoAndU32((uint32_t volatile *)pi32, (uint32_t)i32); 3219 } 3220 3221 3222 /** 3223 * Atomically And an unsigned 64-bit value, unordered. 3224 * 3225 * @param pu64 Pointer to the variable to AND u64 with. 3226 * @param u64 The value to AND *pu64 with. 3227 */ 3228 #if RT_INLINE_ASM_EXTERNAL 3229 DECLASM(void) ASMAtomicUoAndU64(uint64_t volatile *pu64, uint64_t u64); 3230 #else 3231 DECLINLINE(void) ASMAtomicUoAndU64(uint64_t volatile *pu64, uint64_t u64) 3232 { 3233 # if RT_INLINE_ASM_GNU_STYLE && defined(RT_ARCH_AMD64) 3234 __asm__ __volatile__("andq %1, %0\n\t" 3235 : "=m" (*pu64) 3236 : "r" (u64), 3237 "m" (*pu64)); 3238 # else 3239 for (;;) 3240 { 3241 uint64_t u64Old = ASMAtomicUoReadU64(pu64); 3242 uint64_t u64New = u64Old & u64; 3243 if (ASMAtomicCmpXchgU64(pu64, u64New, u64Old)) 3244 break; 3245 ASMNopPause(); 3246 } 3247 # endif 3248 } 3249 #endif 3250 3251 3252 /** 3253 * Atomically And a signed 64-bit value, unordered. 3254 * 3255 * @param pi64 Pointer to the variable to AND i64 with. 3256 * @param i64 The value to AND *pi64 with. 3257 */ 3258 DECLINLINE(void) ASMAtomicUoAndS64(int64_t volatile *pi64, int64_t i64) 3259 { 3260 ASMAtomicUoAndU64((uint64_t volatile *)pi64, (uint64_t)i64); 3085 3261 } 3086 3262 -
trunk/include/iprt/mangling.h
r49326 r49724 56 56 # define ASMAtomicUoReadU64 RT_MANGLER(ASMAtomicUoReadU64) /* not-some-systems... */ 57 57 # define ASMAtomicUoReadU64_EndProc RT_MANGLER(ASMAtomicUoReadU64_EndProc) 58 # define ASMAtomicUoAndU64 RT_MANGLER(ASMAtomicUoAndU64) /* not-some-systems... */ 59 # define ASMAtomicUoAndU64_EndProc RT_MANGLER(ASMAtomicUoAndU64_EndProc) 60 # define ASMAtomicUoAndU32 RT_MANGLER(ASMAtomicUoAndU32) /* not-some-systems... */ 61 # define ASMAtomicUoAndU32_EndProc RT_MANGLER(ASMAtomicUoAndU32_EndProc) 62 # define ASMAtomicUoOrU64 RT_MANGLER(ASMAtomicUoOrU64) /* not-some-systems... */ 63 # define ASMAtomicUoOrU64_EndProc RT_MANGLER(ASMAtomicUoOrU64_EndProc) 64 # define ASMAtomicUoOrU32 RT_MANGLER(ASMAtomicUoOrU32) /* not-some-systems... */ 65 # define ASMAtomicUoOrU32_EndProc RT_MANGLER(ASMAtomicUoOrU32_EndProc) 58 66 # define ASMAtomicXchgU64 RT_MANGLER(ASMAtomicXchgU64) /* not-some-systems... */ 59 67 # define ASMAtomicXchgU64_EndProc RT_MANGLER(ASMAtomicXchgU64_EndProc)
Note:
See TracChangeset
for help on using the changeset viewer.