VirtualBox

source: vbox/trunk/src/VBox/Runtime/testcase/tstRTInlineAsm.cpp@ 87221

最後變更 在這個檔案從87221是 87221,由 vboxsync 提交於 4 年 前

tstRTInlineAsm: Extending testcase. bugref:9898

  • 屬性 svn:eol-style 設為 native
  • 屬性 svn:keywords 設為 Id Revision
檔案大小: 135.7 KB
 
1/* $Id: tstRTInlineAsm.cpp 87221 2021-01-12 08:06:02Z vboxsync $ */
2/** @file
3 * IPRT Testcase - inline assembly.
4 */
5
6/*
7 * Copyright (C) 2006-2020 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 *
17 * The contents of this file may alternatively be used under the terms
18 * of the Common Development and Distribution License Version 1.0
19 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
20 * VirtualBox OSE distribution, in which case the provisions of the
21 * CDDL are applicable instead of those of the GPL.
22 *
23 * You may elect to license modified versions of this file under the
24 * terms and conditions of either the GPL or the CDDL or both.
25 */
26
27
28/*********************************************************************************************************************************
29* Header Files *
30*********************************************************************************************************************************/
31#include <iprt/asm.h>
32#include <iprt/asm-math.h>
33
34/* See http://gcc.gnu.org/bugzilla/show_bug.cgi?id=44018. Only gcc version 4.4
35 * is affected. No harm for the VBox code: If the cpuid code compiles, it works
36 * fine. */
37#if defined(__GNUC__) && defined(RT_ARCH_X86) && defined(__PIC__)
38# if __GNUC__ == 4 && __GNUC_MINOR__ == 4
39# define GCC44_32BIT_PIC
40# endif
41#endif
42
43#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
44# include <iprt/asm-amd64-x86.h>
45# include <iprt/x86.h>
46#else
47# include <iprt/time.h>
48#endif
49#include <iprt/mem.h>
50#include <iprt/param.h>
51#include <iprt/rand.h>
52#include <iprt/stream.h>
53#include <iprt/string.h>
54#include <iprt/thread.h>
55#include <iprt/test.h>
56#include <iprt/time.h>
57
58
59
60/*********************************************************************************************************************************
61* Defined Constants And Macros *
62*********************************************************************************************************************************/
/** Checks that @a val equals @a expect, logging a test failure (with function
 *  name and line number) via RTTestFailed when it does not.
 *  @a fmt is the printf-style format specifier used for both values. */
#define CHECKVAL(val, expect, fmt) \
    do \
    { \
        if ((val) != (expect)) \
        { \
            RTTestFailed(g_hTest, "%s, %d: " #val ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (expect), (val)); \
        } \
    } while (0)
71
/** Evaluates @a op exactly once into a local of @a type and fails the test if
 *  the result differs from @a expect (cast to @a type before comparing).
 *  @a fmt formats both the expected and the actual value in the failure msg. */
#define CHECKOP(op, expect, fmt, type) \
    do \
    { \
        type val = op; \
        if (val != (type)(expect)) \
        { \
            RTTestFailed(g_hTest, "%s, %d: " #op ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (type)(expect), val); \
        } \
    } while (0)
81
/** Checks both the return value of @a a_Operation and the resulting content of
 *  the variable @a a_pVar points to, using one type/format for both checks. */
#define CHECK_OP_AND_VAL(a_Type, a_Fmt, a_pVar, a_Operation, a_ExpectRetVal, a_ExpectVarVal) \
    do { \
        CHECKOP(a_Operation, a_ExpectRetVal, a_Fmt, a_Type); \
        CHECKVAL(*a_pVar, a_ExpectVarVal, a_Fmt); \
    } while (0)
87
/** Like CHECK_OP_AND_VAL, but with separate format specifiers for the return
 *  value (@a a_FmtRet) and the variable (@a a_FmtVar). */
#define CHECK_OP_AND_VAL_EX(a_TypeRet, a_FmtRet, a_FmtVar, a_pVar, a_Operation, a_ExpectRetVal, a_ExpectVarVal) \
    do { \
        CHECKOP(a_Operation, a_ExpectRetVal, a_FmtRet, a_TypeRet); \
        CHECKVAL(*a_pVar, a_ExpectVarVal, a_FmtVar); \
    } while (0)
93
/** Like CHECK_OP_AND_VAL_EX, but additionally checks a second variable
 *  @a a_uVar2 against @a a_ExpectVarVal2 (same format as the first variable). */
#define CHECK_OP_AND_VAL_EX2(a_TypeRet, a_FmtRet, a_FmtVar, a_pVar, a_uVar2, a_Operation, a_ExpectRetVal, a_ExpectVarVal, a_ExpectVarVal2) \
    do { \
        CHECKOP(a_Operation, a_ExpectRetVal, a_FmtRet, a_TypeRet); \
        CHECKVAL(*a_pVar, a_ExpectVarVal, a_FmtVar); \
        CHECKVAL(a_uVar2, a_ExpectVarVal2, a_FmtVar); \
    } while (0)
100
101
/**
 * Calls a worker function with different worker variable storage types:
 * first an ordinary stack variable, then guarded heap allocations
 * (RTTestGuardedAllocHead / RTTestGuardedAllocTail) so that out-of-bounds
 * accesses by the tested inline-assembly primitives trip a guard page.
 */
#define DO_SIMPLE_TEST(name, type) \
    do \
    { \
        RTTestISub(#name); \
        type StackVar; \
        tst ## name ## Worker(&StackVar); \
        \
        type *pVar = (type *)RTTestGuardedAllocHead(g_hTest, sizeof(type)); \
        RTTEST_CHECK_BREAK(g_hTest, pVar); \
        tst ## name ## Worker(pVar); \
        RTTestGuardedFree(g_hTest, pVar); \
        \
        pVar = (type *)RTTestGuardedAllocTail(g_hTest, sizeof(type)); \
        RTTEST_CHECK_BREAK(g_hTest, pVar); \
        tst ## name ## Worker(pVar); \
        RTTestGuardedFree(g_hTest, pVar); \
    } while (0)
122
123
124/*********************************************************************************************************************************
125* Global Variables *
126*********************************************************************************************************************************/
127/** The test instance. */
128static RTTEST g_hTest;
129
130
131
132#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
133
134const char *getCacheAss(unsigned u)
135{
136 if (u == 0)
137 return "res0 ";
138 if (u == 1)
139 return "direct";
140 if (u >= 256)
141 return "???";
142
143 char *pszRet = NULL;
144 RTStrAPrintf(&pszRet, "%d way", u);
145 RTMEM_WILL_LEAK(pszRet);
146 return pszRet;
147}
148
149
/**
 * Translates the 4-bit L2 cache associativity field (CPUID 0x80000006) into a
 * readable string.
 *
 * @returns Read-only string (padded for column alignment in the dump).
 * @param   u   The raw associativity field value.
 */
const char *getL2CacheAss(unsigned u)
{
    /* One entry per possible 4-bit field value. */
    static const char * const s_apszAss[16] =
    {
        "off ",   "direct", "2 way ", "res3 ",
        "4 way ", "res5 ",  "8 way ", "res7 ",
        "16 way", "res9 ",  "res10 ", "res11 ",
        "res12 ", "res13 ", "res14 ", "fully "
    };
    if (u < sizeof(s_apszAss) / sizeof(s_apszAss[0]))
        return s_apszAss[u];
    return "????";
}
174
175
/**
 * Test and dump all possible info from the CPUID instruction.
 *
 * Cross-checks ASMCpuId against the single-register variants
 * (ASMCpuId_EAX/EBX/ECX/EDX, ASMCpuId_ECX_EDX, ASMCpuIdExSlow), then dumps
 * the raw and decoded standard and extended leaves.
 *
 * @remark  Bits shared with the libc cpuid.c program. This all written by me, so no worries.
 * @todo transform the dumping into a generic runtime function. We'll need it for logging!
 */
void tstASMCpuId(void)
{
    RTTestISub("ASMCpuId");

    unsigned iBit;
    struct
    {
        uint32_t uEBX, uEAX, uEDX, uECX;
    } s;
    if (!ASMHasCpuId())
    {
        RTTestIPrintf(RTTESTLVL_ALWAYS, "warning! CPU doesn't support CPUID\n");
        return;
    }

    /*
     * Try the 0 function and use that for checking the ASMCpuId_* variants.
     */
    ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);

    uint32_t u32;

    u32 = ASMCpuId_EAX(0);
    CHECKVAL(u32, s.uEAX, "%x");
    u32 = ASMCpuId_EBX(0);
    CHECKVAL(u32, s.uEBX, "%x");
    u32 = ASMCpuId_ECX(0);
    CHECKVAL(u32, s.uECX, "%x");
    u32 = ASMCpuId_EDX(0);
    CHECKVAL(u32, s.uEDX, "%x");

    /* Seed the output variables with values that differ from the expected
       ones so we can tell the function really wrote them. */
    uint32_t uECX2 = s.uECX - 1;
    uint32_t uEDX2 = s.uEDX - 1;
    ASMCpuId_ECX_EDX(0, &uECX2, &uEDX2);
    CHECKVAL(uECX2, s.uECX, "%x");
    CHECKVAL(uEDX2, s.uEDX, "%x");

    uint32_t uEAX2 = s.uEAX - 1;
    uint32_t uEBX2 = s.uEBX - 1;
    uECX2 = s.uECX - 1;
    uEDX2 = s.uEDX - 1;
    ASMCpuIdExSlow(0, 0, 0, 0, &uEAX2, &uEBX2, &uECX2, &uEDX2);
    CHECKVAL(uEAX2, s.uEAX, "%x");
    CHECKVAL(uEBX2, s.uEBX, "%x");
    CHECKVAL(uECX2, s.uECX, "%x");
    CHECKVAL(uEDX2, s.uEDX, "%x");

    /*
     * Check the extended APIC stuff.
     */
    uint32_t idExtApic;
    if (ASMCpuId_EAX(0) >= 0xb)
    {
        /* Retry loop: redo the reads if the thread got moved to another CPU
           between sampling the plain APIC ID and the extended one. */
        uint8_t idApic = ASMGetApicId();
        do
        {
            uEAX2 = uEBX2 = uECX2 = uEDX2 = UINT32_C(0x50486744);
            ASMCpuIdExSlow(0xb, 0, 0, 0, &uEAX2, &uEBX2, &uECX2, &uEDX2);
            idExtApic = ASMGetApicIdExt0B();
        } while (ASMGetApicId() != idApic);

        CHECKVAL(uEDX2, idExtApic, "%x");
        if (idApic != (uint8_t)idExtApic && uECX2 != 0)
            RTTestIFailed("ASMGetApicIdExt0B() -> %#x vs ASMGetApicId() -> %#x", idExtApic, idApic);
    }
    if (ASMCpuId_EAX(UINT32_C(0x80000000)) >= UINT32_C(0x8000001E))
    {
        /* Same rescheduling-safe dance for leaf 0x8000001e. */
        uint8_t idApic = ASMGetApicId();
        do
        {
            uEAX2 = uEBX2 = uECX2 = uEDX2 = UINT32_C(0x50486744);
            ASMCpuIdExSlow(0x8000001e, 0, 0, 0, &uEAX2, &uEBX2, &uECX2, &uEDX2);
            idExtApic = ASMGetApicIdExt8000001E();
        } while (ASMGetApicId() != idApic);
        CHECKVAL(uEAX2, idExtApic, "%x");
        if (idApic != (uint8_t)idExtApic)
            RTTestIFailed("ASMGetApicIdExt8000001E() -> %#x vs ASMGetApicId() -> %#x", idExtApic, idApic);
    }

    /*
     * Done testing, dump the information.
     */
    RTTestIPrintf(RTTESTLVL_ALWAYS, "CPUID Dump\n");
    ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
    const uint32_t cFunctions = s.uEAX;

    /* raw dump (dumps a few leaves past the reported maximum, marked '*') */
    RTTestIPrintf(RTTESTLVL_ALWAYS,
                  "\n"
                  " RAW Standard CPUIDs\n"
                  "Function eax ebx ecx edx\n");
    for (unsigned iStd = 0; iStd <= cFunctions + 3; iStd++)
    {
        ASMCpuId_Idx_ECX(iStd, 0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTTestIPrintf(RTTESTLVL_ALWAYS, "%08x %08x %08x %08x %08x%s\n",
                      iStd, s.uEAX, s.uEBX, s.uECX, s.uEDX, iStd <= cFunctions ? "" : "*");

        /* Some leafs output depend on the initial value of ECX.
         * The same seems to apply to invalid standard functions */
        if (iStd > cFunctions)
            continue;
        if (iStd == 0x04) /* Deterministic Cache Parameters Leaf */
            for (uint32_t uECX = 1; s.uEAX & 0x1f; uECX++)
            {
                ASMCpuId_Idx_ECX(iStd, uECX, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
                RTTestIPrintf(RTTESTLVL_ALWAYS, " [%02x] %08x %08x %08x %08x\n", uECX, s.uEAX, s.uEBX, s.uECX, s.uEDX);
                RTTESTI_CHECK_BREAK(uECX < 128); /* sanity cap on sub-leaf count */
            }
        else if (iStd == 0x07) /* Structured Extended Feature Flags */
        {
            uint32_t uMax = s.uEAX;
            for (uint32_t uECX = 1; uECX < uMax; uECX++)
            {
                ASMCpuId_Idx_ECX(iStd, uECX, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
                RTTestIPrintf(RTTESTLVL_ALWAYS, " [%02x] %08x %08x %08x %08x\n", uECX, s.uEAX, s.uEBX, s.uECX, s.uEDX);
                RTTESTI_CHECK_BREAK(uECX < 128);
            }
        }
        else if (iStd == 0x0b) /* Extended Topology Enumeration Leafs */
            for (uint32_t uECX = 1; (s.uEAX & 0x1f) && (s.uEBX & 0xffff); uECX++)
            {
                ASMCpuId_Idx_ECX(iStd, uECX, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
                RTTestIPrintf(RTTESTLVL_ALWAYS, " [%02x] %08x %08x %08x %08x\n", uECX, s.uEAX, s.uEBX, s.uECX, s.uEDX);
                RTTESTI_CHECK_BREAK(uECX < 128);
            }
        else if (iStd == 0x0d) /* Extended State Enumeration Leafs */
            for (uint32_t uECX = 1; s.uEAX != 0 || s.uEBX != 0 || s.uECX != 0 || s.uEDX != 0; uECX++)
            {
                ASMCpuId_Idx_ECX(iStd, uECX, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
                RTTestIPrintf(RTTESTLVL_ALWAYS, " [%02x] %08x %08x %08x %08x\n", uECX, s.uEAX, s.uEBX, s.uECX, s.uEDX);
                RTTESTI_CHECK_BREAK(uECX < 128);
            }
        else if (   iStd == 0x0f /* Platform quality of service monitoring (PQM) */
                 || iStd == 0x10 /* Platform quality of service enforcement (PQE) */
                 || iStd == 0x12 /* SGX Enumeration */
                 || iStd == 0x14 /* Processor Trace Enumeration */
                 || iStd == 0x17 /* SoC Vendor Attribute Enumeration */
                 || iStd == 0x18 /* Deterministic Address Translation Parameters */)
        {
            /** @todo */
        }
        else
        {
            /* Leaves with ECX-independent output: cross-check all variants. */
            u32 = ASMCpuId_EAX(iStd);
            CHECKVAL(u32, s.uEAX, "%x");

            uint32_t u32EbxMask = UINT32_MAX;
            if (iStd == 1)
                u32EbxMask = UINT32_C(0x00ffffff); /* Omit the local apic ID in case we're rescheduled. */
            u32 = ASMCpuId_EBX(iStd);
            CHECKVAL(u32 & u32EbxMask, s.uEBX & u32EbxMask, "%x");

            u32 = ASMCpuId_ECX(iStd);
            CHECKVAL(u32, s.uECX, "%x");
            u32 = ASMCpuId_EDX(iStd);
            CHECKVAL(u32, s.uEDX, "%x");

            uECX2 = s.uECX - 1;
            uEDX2 = s.uEDX - 1;
            ASMCpuId_ECX_EDX(iStd, &uECX2, &uEDX2);
            CHECKVAL(uECX2, s.uECX, "%x");
            CHECKVAL(uEDX2, s.uEDX, "%x");

            uEAX2 = s.uEAX - 1;
            uEBX2 = s.uEBX - 1;
            uECX2 = s.uECX - 1;
            uEDX2 = s.uEDX - 1;
            ASMCpuId(iStd, &uEAX2, &uEBX2, &uECX2, &uEDX2);
            CHECKVAL(uEAX2, s.uEAX, "%x");
            CHECKVAL(uEBX2 & u32EbxMask, s.uEBX & u32EbxMask, "%x");
            CHECKVAL(uECX2, s.uECX, "%x");
            CHECKVAL(uEDX2, s.uEDX, "%x");
        }
    }

    /*
     * Understandable output
     */
    ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
    RTTestIPrintf(RTTESTLVL_ALWAYS,
                  "Name: %.04s%.04s%.04s\n"
                  "Support: 0-%u\n",
                  &s.uEBX, &s.uEDX, &s.uECX, s.uEAX);
    bool const fIntel = ASMIsIntelCpuEx(s.uEBX, s.uECX, s.uEDX);

    /*
     * Get Features.
     */
    if (cFunctions >= 1)
    {
        static const char * const s_apszTypes[4] = { "primary", "overdrive", "MP", "reserved" };
        ASMCpuId(1, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "Family: %#x \tExtended: %#x \tEffective: %#x\n"
                      "Model: %#x \tExtended: %#x \tEffective: %#x\n"
                      "Stepping: %d\n"
                      "Type: %d (%s)\n"
                      "APIC ID: %#04x\n"
                      "Logical CPUs: %d\n"
                      "CLFLUSH Size: %d\n"
                      "Brand ID: %#04x\n",
                      (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ASMGetCpuFamily(s.uEAX),
                      (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ASMGetCpuModel(s.uEAX, fIntel),
                      ASMGetCpuStepping(s.uEAX),
                      (s.uEAX >> 12) & 0x3, s_apszTypes[(s.uEAX >> 12) & 0x3],
                      (s.uEBX >> 24) & 0xff,
                      (s.uEBX >> 16) & 0xff,
                      (s.uEBX >> 8) & 0xff,
                      (s.uEBX >> 0) & 0xff);

        /* Leaf 1 EDX feature flags; unnamed bits are printed as numbers. */
        RTTestIPrintf(RTTESTLVL_ALWAYS, "Features EDX: ");
        if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FPU");
        if (s.uEDX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " VME");
        if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DE");
        if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE");
        if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TSC");
        if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MSR");
        if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAE");
        if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCE");
        if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CX8");
        if (s.uEDX & RT_BIT(9)) RTTestIPrintf(RTTESTLVL_ALWAYS, " APIC");
        if (s.uEDX & RT_BIT(10)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 10");
        if (s.uEDX & RT_BIT(11)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SEP");
        if (s.uEDX & RT_BIT(12)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MTRR");
        if (s.uEDX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PGE");
        if (s.uEDX & RT_BIT(14)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCA");
        if (s.uEDX & RT_BIT(15)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMOV");
        if (s.uEDX & RT_BIT(16)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAT");
        if (s.uEDX & RT_BIT(17)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE36");
        if (s.uEDX & RT_BIT(18)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSN");
        if (s.uEDX & RT_BIT(19)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CLFSH");
        if (s.uEDX & RT_BIT(20)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 20");
        if (s.uEDX & RT_BIT(21)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DS");
        if (s.uEDX & RT_BIT(22)) RTTestIPrintf(RTTESTLVL_ALWAYS, " ACPI");
        if (s.uEDX & RT_BIT(23)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MMX");
        if (s.uEDX & RT_BIT(24)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FXSR");
        if (s.uEDX & RT_BIT(25)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE");
        if (s.uEDX & RT_BIT(26)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE2");
        if (s.uEDX & RT_BIT(27)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SS");
        if (s.uEDX & RT_BIT(28)) RTTestIPrintf(RTTESTLVL_ALWAYS, " HTT");
        if (s.uEDX & RT_BIT(29)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 29");
        if (s.uEDX & RT_BIT(30)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 30");
        if (s.uEDX & RT_BIT(31)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 31");
        RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");

        /** @todo check intel docs. */
        RTTestIPrintf(RTTESTLVL_ALWAYS, "Features ECX: ");
        if (s.uECX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE3");
        for (iBit = 1; iBit < 13; iBit++)
            if (s.uECX & RT_BIT(iBit))
                RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
        if (s.uECX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CX16");
        for (iBit = 14; iBit < 32; iBit++)
            if (s.uECX & RT_BIT(iBit))
                RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
        RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
    }
    if (ASMCpuId_EAX(0) >= 0xb)
        RTTestIPrintf(RTTESTLVL_ALWAYS, "APIC ID(Ext 0b): %#010x\n", ASMGetApicIdExt0B());

    /*
     * Extended.
     * Implemented after AMD specs.
     */
    /** @todo check out the intel specs. */
    ASMCpuId(0x80000000, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
    if (!s.uEAX && !s.uEBX && !s.uECX && !s.uEDX)
    {
        RTTestIPrintf(RTTESTLVL_ALWAYS, "No extended CPUID info? Check the manual on how to detect this...\n");
        return;
    }
    const uint32_t cExtFunctions = s.uEAX | 0x80000000;

    /* raw dump */
    RTTestIPrintf(RTTESTLVL_ALWAYS,
                  "\n"
                  " RAW Extended CPUIDs\n"
                  "Function eax ebx ecx edx\n");
    for (unsigned iExt = 0x80000000; iExt <= cExtFunctions + 3; iExt++)
    {
        ASMCpuId(iExt, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTTestIPrintf(RTTESTLVL_ALWAYS, "%08x %08x %08x %08x %08x%s\n",
                      iExt, s.uEAX, s.uEBX, s.uECX, s.uEDX, iExt <= cExtFunctions ? "" : "*");

        if (iExt > cExtFunctions)
            continue; /* Invalid extended functions seems change the value if ECX changes */
        if (iExt == 0x8000001d)
            continue; /* Takes cache level in ecx. */

        u32 = ASMCpuId_EAX(iExt);
        CHECKVAL(u32, s.uEAX, "%x");
        u32 = ASMCpuId_EBX(iExt);
        CHECKVAL(u32, s.uEBX, "%x");
        u32 = ASMCpuId_ECX(iExt);
        CHECKVAL(u32, s.uECX, "%x");
        u32 = ASMCpuId_EDX(iExt);
        CHECKVAL(u32, s.uEDX, "%x");

        uECX2 = s.uECX - 1;
        uEDX2 = s.uEDX - 1;
        ASMCpuId_ECX_EDX(iExt, &uECX2, &uEDX2);
        CHECKVAL(uECX2, s.uECX, "%x");
        CHECKVAL(uEDX2, s.uEDX, "%x");

        uEAX2 = s.uEAX - 1;
        uEBX2 = s.uEBX - 1;
        uECX2 = s.uECX - 1;
        uEDX2 = s.uEDX - 1;
        ASMCpuId(iExt, &uEAX2, &uEBX2, &uECX2, &uEDX2);
        CHECKVAL(uEAX2, s.uEAX, "%x");
        CHECKVAL(uEBX2, s.uEBX, "%x");
        CHECKVAL(uECX2, s.uECX, "%x");
        CHECKVAL(uEDX2, s.uEDX, "%x");
    }

    /*
     * Understandable output
     */
    ASMCpuId(0x80000000, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
    RTTestIPrintf(RTTESTLVL_ALWAYS,
                  "Ext Name: %.4s%.4s%.4s\n"
                  "Ext Supports: 0x80000000-%#010x\n",
                  &s.uEBX, &s.uEDX, &s.uECX, s.uEAX);

    if (cExtFunctions >= 0x80000001)
    {
        ASMCpuId(0x80000001, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "Family: %#x \tExtended: %#x \tEffective: %#x\n"
                      "Model: %#x \tExtended: %#x \tEffective: %#x\n"
                      "Stepping: %d\n"
                      "Brand ID: %#05x\n",
                      (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ASMGetCpuFamily(s.uEAX),
                      (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ASMGetCpuModel(s.uEAX, fIntel),
                      ASMGetCpuStepping(s.uEAX),
                      s.uEBX & 0xfff);

        /* Extended leaf 0x80000001 EDX feature flags (AMD naming). */
        RTTestIPrintf(RTTESTLVL_ALWAYS, "Features EDX: ");
        if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FPU");
        if (s.uEDX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " VME");
        if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DE");
        if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE");
        if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TSC");
        if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MSR");
        if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAE");
        if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCE");
        if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMPXCHG8B");
        if (s.uEDX & RT_BIT(9)) RTTestIPrintf(RTTESTLVL_ALWAYS, " APIC");
        if (s.uEDX & RT_BIT(10)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 10");
        if (s.uEDX & RT_BIT(11)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SysCallSysRet");
        if (s.uEDX & RT_BIT(12)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MTRR");
        if (s.uEDX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PGE");
        if (s.uEDX & RT_BIT(14)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCA");
        if (s.uEDX & RT_BIT(15)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMOV");
        if (s.uEDX & RT_BIT(16)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAT");
        if (s.uEDX & RT_BIT(17)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE36");
        if (s.uEDX & RT_BIT(18)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 18");
        if (s.uEDX & RT_BIT(19)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 19");
        if (s.uEDX & RT_BIT(20)) RTTestIPrintf(RTTESTLVL_ALWAYS, " NX");
        if (s.uEDX & RT_BIT(21)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 21");
        if (s.uEDX & RT_BIT(22)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MmxExt");
        if (s.uEDX & RT_BIT(23)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MMX");
        if (s.uEDX & RT_BIT(24)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FXSR");
        if (s.uEDX & RT_BIT(25)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FastFXSR");
        if (s.uEDX & RT_BIT(26)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 26");
        if (s.uEDX & RT_BIT(27)) RTTestIPrintf(RTTESTLVL_ALWAYS, " RDTSCP");
        if (s.uEDX & RT_BIT(28)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 28");
        if (s.uEDX & RT_BIT(29)) RTTestIPrintf(RTTESTLVL_ALWAYS, " LongMode");
        if (s.uEDX & RT_BIT(30)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3DNowExt");
        if (s.uEDX & RT_BIT(31)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3DNow");
        RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");

        RTTestIPrintf(RTTESTLVL_ALWAYS, "Features ECX: ");
        if (s.uECX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " LahfSahf");
        if (s.uECX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CmpLegacy");
        if (s.uECX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SVM");
        if (s.uECX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3");
        if (s.uECX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " AltMovCr8");
        for (iBit = 5; iBit < 32; iBit++)
            if (s.uECX & RT_BIT(iBit))
                RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
        RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
    }

    /* Processor name string: three leaves of 16 bytes each, plus terminator. */
    char szString[4*4*3+1] = {0};
    if (cExtFunctions >= 0x80000002)
        ASMCpuId(0x80000002, &szString[0 + 0], &szString[0 + 4], &szString[0 + 8], &szString[0 + 12]);
    if (cExtFunctions >= 0x80000003)
        ASMCpuId(0x80000003, &szString[16 + 0], &szString[16 + 4], &szString[16 + 8], &szString[16 + 12]);
    if (cExtFunctions >= 0x80000004)
        ASMCpuId(0x80000004, &szString[32 + 0], &szString[32 + 4], &szString[32 + 8], &szString[32 + 12]);
    if (cExtFunctions >= 0x80000002)
        RTTestIPrintf(RTTESTLVL_ALWAYS, "Full Name: %s\n", szString);

    if (cExtFunctions >= 0x80000005)
    {
        ASMCpuId(0x80000005, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "TLB 2/4M Instr/Uni: %s %3d entries\n"
                      "TLB 2/4M Data: %s %3d entries\n",
                      getCacheAss((s.uEAX >> 8) & 0xff), (s.uEAX >> 0) & 0xff,
                      getCacheAss((s.uEAX >> 24) & 0xff), (s.uEAX >> 16) & 0xff);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "TLB 4K Instr/Uni: %s %3d entries\n"
                      "TLB 4K Data: %s %3d entries\n",
                      getCacheAss((s.uEBX >> 8) & 0xff), (s.uEBX >> 0) & 0xff,
                      getCacheAss((s.uEBX >> 24) & 0xff), (s.uEBX >> 16) & 0xff);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "L1 Instr Cache Line Size: %d bytes\n"
                      "L1 Instr Cache Lines Per Tag: %d\n"
                      "L1 Instr Cache Associativity: %s\n"
                      "L1 Instr Cache Size: %d KB\n",
                      (s.uEDX >> 0) & 0xff,
                      (s.uEDX >> 8) & 0xff,
                      getCacheAss((s.uEDX >> 16) & 0xff),
                      (s.uEDX >> 24) & 0xff);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "L1 Data Cache Line Size: %d bytes\n"
                      "L1 Data Cache Lines Per Tag: %d\n"
                      "L1 Data Cache Associativity: %s\n"
                      "L1 Data Cache Size: %d KB\n",
                      (s.uECX >> 0) & 0xff,
                      (s.uECX >> 8) & 0xff,
                      getCacheAss((s.uECX >> 16) & 0xff),
                      (s.uECX >> 24) & 0xff);
    }

    if (cExtFunctions >= 0x80000006)
    {
        ASMCpuId(0x80000006, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "L2 TLB 2/4M Instr/Uni: %s %4d entries\n"
                      "L2 TLB 2/4M Data: %s %4d entries\n",
                      getL2CacheAss((s.uEAX >> 12) & 0xf), (s.uEAX >> 0) & 0xfff,
                      getL2CacheAss((s.uEAX >> 28) & 0xf), (s.uEAX >> 16) & 0xfff);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "L2 TLB 4K Instr/Uni: %s %4d entries\n"
                      "L2 TLB 4K Data: %s %4d entries\n",
                      getL2CacheAss((s.uEBX >> 12) & 0xf), (s.uEBX >> 0) & 0xfff,
                      getL2CacheAss((s.uEBX >> 28) & 0xf), (s.uEBX >> 16) & 0xfff);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "L2 Cache Line Size: %d bytes\n"
                      "L2 Cache Lines Per Tag: %d\n"
                      "L2 Cache Associativity: %s\n"
                      "L2 Cache Size: %d KB\n",
                      (s.uEDX >> 0) & 0xff,
                      (s.uEDX >> 8) & 0xf,
                      getL2CacheAss((s.uEDX >> 12) & 0xf),
                      (s.uEDX >> 16) & 0xffff);
    }

    if (cExtFunctions >= 0x80000007)
    {
        ASMCpuId(0x80000007, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTTestIPrintf(RTTESTLVL_ALWAYS, "APM Features: ");
        if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TS");
        if (s.uEDX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FID");
        if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " VID");
        if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TTP");
        if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TM");
        if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " STC");
        if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 6");
        if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 7");
        if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TscInvariant");
        for (iBit = 9; iBit < 32; iBit++)
            if (s.uEDX & RT_BIT(iBit))
                RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
        RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
    }

    if (cExtFunctions >= 0x80000008)
    {
        ASMCpuId(0x80000008, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "Physical Address Width: %d bits\n"
                      "Virtual Address Width: %d bits\n"
                      "Guest Physical Address Width: %d bits\n",
                      (s.uEAX >> 0) & 0xff,
                      (s.uEAX >> 8) & 0xff,
                      (s.uEAX >> 16) & 0xff);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "Physical Core Count: %d\n",
                      ((s.uECX >> 0) & 0xff) + 1);
        if ((s.uECX >> 12) & 0xf)
            RTTestIPrintf(RTTESTLVL_ALWAYS, "ApicIdCoreIdSize: %d bits\n", (s.uECX >> 12) & 0xf);
    }

    if (cExtFunctions >= 0x8000000a)
    {
        ASMCpuId(0x8000000a, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "SVM Revision: %d (%#x)\n"
                      "Number of Address Space IDs: %d (%#x)\n",
                      s.uEAX & 0xff, s.uEAX & 0xff,
                      s.uEBX, s.uEBX);
    }
    if (ASMCpuId_EAX(UINT32_C(0x80000000)) >= UINT32_C(0x8000001E))
        /* NOTE(review): the label says "Ext 8000001b" but the value comes from
           leaf 0x8000001E (ASMGetApicIdExt8000001E) - confirm intended text. */
        RTTestIPrintf(RTTESTLVL_ALWAYS, "APIC ID(Ext 8000001b): %#010x\n", ASMGetApicIdExt8000001E());
}
681
682# if 0
/**
 * Brute forces the entire 32-bit CPUID leaf range, dumping any leaf whose
 * register values differ from the previous leaf's (ignoring the common
 * pattern of EAX echoing the leaf number with the rest zero), plus the
 * first leaf of every 128M block as a progress marker.
 *
 * @note Compiled out via the enclosing \#if 0; issues 2^32 CPUID
 *       instructions, so it is far too slow for the regular test run.
 */
static void bruteForceCpuId(void)
{
    RTTestISub("brute force CPUID leafs");
    uint32_t auPrevValues[4] = { 0, 0, 0, 0};
    uint32_t uLeaf = 0;
    do
    {
        uint32_t auValues[4];
        ASMCpuIdExSlow(uLeaf, 0, 0, 0, &auValues[0], &auValues[1], &auValues[2], &auValues[3]);
        if (   (auValues[0] != auPrevValues[0] && auValues[0] != uLeaf)
            || (auValues[1] != auPrevValues[1] && auValues[1] != 0)
            || (auValues[2] != auPrevValues[2] && auValues[2] != 0)
            || (auValues[3] != auPrevValues[3] && auValues[3] != 0)
            || (uLeaf & (UINT32_C(0x08000000) - UINT32_C(1))) == 0)
        {
            RTTestIPrintf(RTTESTLVL_ALWAYS,
                          "%08x: %08x %08x %08x %08x\n", uLeaf,
                          auValues[0], auValues[1], auValues[2], auValues[3]);
        }
        auPrevValues[0] = auValues[0];
        auPrevValues[1] = auValues[1];
        auPrevValues[2] = auValues[2];
        auPrevValues[3] = auValues[3];

        //uint32_t uSubLeaf = 0;
        //do
        //{
        //
        //
        //} while (false);
    } while (uLeaf++ < UINT32_MAX);
}
715# endif
716
717#endif /* AMD64 || X86 */
718
/** Tests an atomic read function: stores @a a_Val into @a a_pVar, checks that
 *  @a a_Function returns it, and that the variable is unchanged afterwards. */
#define TEST_READ(a_pVar, a_Type, a_Fmt, a_Function, a_Val) \
    do { *a_pVar = a_Val; CHECKOP(a_Function(a_pVar), a_Val, a_Fmt, a_Type); CHECKVAL(*a_pVar, a_Val, a_Fmt); } while (0)
721
722DECLINLINE(void) tstASMAtomicReadU8Worker(uint8_t volatile *pu8)
723{
724 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 0);
725 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 1);
726 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 2);
727 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 16);
728 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 32);
729 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 32);
730 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 127);
731 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 128);
732 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 169);
733 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 239);
734 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 254);
735 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 255);
736
737 int8_t volatile *pi8 = (int8_t volatile *)pu8;
738 TEST_READ(pi8, uint8_t, "%d", ASMAtomicReadS8, INT8_MAX);
739 TEST_READ(pi8, uint8_t, "%d", ASMAtomicReadS8, INT8_MIN);
740 TEST_READ(pi8, uint8_t, "%d", ASMAtomicReadS8, 42);
741 TEST_READ(pi8, uint8_t, "%d", ASMAtomicReadS8, -21);
742
743 bool volatile *pf = (bool volatile *)pu8;
744 TEST_READ(pf, bool, "%d", ASMAtomicReadBool, true);
745 TEST_READ(pf, bool, "%d", ASMAtomicReadBool, false);
746}
747
748
749DECLINLINE(void) tstASMAtomicUoReadU8Worker(uint8_t volatile *pu8)
750{
751 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 0);
752 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 1);
753 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 2);
754 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 16);
755 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 32);
756 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 32);
757 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 127);
758 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 128);
759 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 169);
760 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 239);
761 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 254);
762 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 255);
763
764 int8_t volatile *pi8 = (int8_t volatile *)pu8;
765 TEST_READ(pi8, uint8_t, "%d", ASMAtomicUoReadS8, INT8_MAX);
766 TEST_READ(pi8, uint8_t, "%d", ASMAtomicUoReadS8, INT8_MIN);
767 TEST_READ(pi8, uint8_t, "%d", ASMAtomicUoReadS8, 42);
768 TEST_READ(pi8, uint8_t, "%d", ASMAtomicUoReadS8, -21);
769
770 bool volatile *pf = (bool volatile *)pu8;
771 TEST_READ(pf, bool, "%d", ASMAtomicUoReadBool, true);
772 TEST_READ(pf, bool, "%d", ASMAtomicUoReadBool, false);
773}
774
775
776DECLINLINE(void) tstASMAtomicReadU16Worker(uint16_t volatile *pu16)
777{
778 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicReadU16, 0);
779 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicReadU16, 19983);
780 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicReadU16, INT16_MAX);
781 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicReadU16, UINT16_MAX);
782
783 int16_t volatile *pi16 = (int16_t volatile *)pu16;
784 TEST_READ(pi16, uint16_t, "%d", ASMAtomicReadS16, INT16_MAX);
785 TEST_READ(pi16, uint16_t, "%d", ASMAtomicReadS16, INT16_MIN);
786 TEST_READ(pi16, uint16_t, "%d", ASMAtomicReadS16, 42);
787 TEST_READ(pi16, uint16_t, "%d", ASMAtomicReadS16, -21);
788}
789
790
791DECLINLINE(void) tstASMAtomicUoReadU16Worker(uint16_t volatile *pu16)
792{
793 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicUoReadU16, 0);
794 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicUoReadU16, 19983);
795 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicUoReadU16, INT16_MAX);
796 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicUoReadU16, UINT16_MAX);
797
798 int16_t volatile *pi16 = (int16_t volatile *)pu16;
799 TEST_READ(pi16, uint16_t, "%d", ASMAtomicUoReadS16, INT16_MAX);
800 TEST_READ(pi16, uint16_t, "%d", ASMAtomicUoReadS16, INT16_MIN);
801 TEST_READ(pi16, uint16_t, "%d", ASMAtomicUoReadS16, 42);
802 TEST_READ(pi16, uint16_t, "%d", ASMAtomicUoReadS16, -21);
803}
804
805
/** Exercises the ordered 32-bit atomic read variants; on 32-bit hosts the
 *  same storage additionally doubles as size_t, pointer and handle storage. */
DECLINLINE(void) tstASMAtomicReadU32Worker(uint32_t volatile *pu32)
{
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, 0);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, 19983);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, INT16_MAX);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, UINT16_MAX);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, _1M-1);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, _1M+1);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, _1G-1);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, _1G+1);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, INT32_MAX);
    TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, UINT32_MAX);

    /* Signed 32-bit reads through the same storage. */
    int32_t volatile *pi32 = (int32_t volatile *)pu32;
    TEST_READ(pi32, uint32_t, "%d", ASMAtomicReadS32, INT32_MAX);
    TEST_READ(pi32, uint32_t, "%d", ASMAtomicReadS32, INT32_MIN);
    TEST_READ(pi32, uint32_t, "%d", ASMAtomicReadS32, 42);
    TEST_READ(pi32, uint32_t, "%d", ASMAtomicReadS32, -21);

#if ARCH_BITS == 32
    /* On 32-bit hosts size_t/pointers/handles are 32-bit, so reuse the storage. */
    size_t volatile *pcb = (size_t volatile *)pu32;
    /* NOTE(review): "%#llz" looks like an odd format specifier for size_t -
       confirm against the RTStrFormat/IPRT format extensions. */
    TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, 0);
    TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, ~(size_t)2);
    TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, ~(size_t)0 / 4);

    void * volatile *ppv = (void * volatile *)pu32;
    TEST_READ(ppv, void *, "%p", ASMAtomicReadPtr, NULL);
    TEST_READ(ppv, void *, "%p", ASMAtomicReadPtr, (void *)~(uintptr_t)42);

    /* The handle reads rely on ~42 still being in the variable from the
       preceding TEST_READ above. */
    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
    RTSEMEVENT hEvt = ASMAtomicReadPtrT(phEvt, RTSEMEVENT);
    CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");

    ASMAtomicReadHandle(phEvt, &hEvt);
    CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
#endif
}
843
844
845DECLINLINE(void) tstASMAtomicUoReadU32Worker(uint32_t volatile *pu32)
846{
847 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, 0);
848 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, 19983);
849 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, INT16_MAX);
850 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, UINT16_MAX);
851 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, _1M-1);
852 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, _1M+1);
853 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, _1G-1);
854 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, _1G+1);
855 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, INT32_MAX);
856 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, UINT32_MAX);
857
858 int32_t volatile *pi32 = (int32_t volatile *)pu32;
859 TEST_READ(pi32, uint32_t, "%d", ASMAtomicUoReadS32, INT32_MAX);
860 TEST_READ(pi32, uint32_t, "%d", ASMAtomicUoReadS32, INT32_MIN);
861 TEST_READ(pi32, uint32_t, "%d", ASMAtomicUoReadS32, 42);
862 TEST_READ(pi32, uint32_t, "%d", ASMAtomicUoReadS32, -21);
863
864#if ARCH_BITS == 32
865 size_t volatile *pcb = (size_t volatile *)pu32;
866 TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, 0);
867 TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, ~(size_t)2);
868 TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, ~(size_t)0 / 4);
869
870 void * volatile *ppv = (void * volatile *)pu32;
871 TEST_READ(ppv, void *, "%p", ASMAtomicUoReadPtr, NULL);
872 TEST_READ(ppv, void *, "%p", ASMAtomicUoReadPtr, (void *)~(uintptr_t)42);
873
874 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
875 RTSEMEVENT hEvt = ASMAtomicUoReadPtrT(phEvt, RTSEMEVENT);
876 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
877
878 ASMAtomicUoReadHandle(phEvt, &hEvt);
879 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
880#endif
881}
882
883
884DECLINLINE(void) tstASMAtomicReadU64Worker(uint64_t volatile *pu64)
885{
886 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, 0);
887 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, 19983);
888 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, INT16_MAX);
889 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, UINT16_MAX);
890 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, _1M-1);
891 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, _1M+1);
892 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, _1G-1);
893 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, _1G+1);
894 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, INT32_MAX);
895 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, UINT32_MAX);
896 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, INT64_MAX);
897 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, UINT64_MAX);
898 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, UINT64_C(0x450872549687134));
899
900 int64_t volatile *pi64 = (int64_t volatile *)pu64;
901 TEST_READ(pi64, uint64_t, "%d", ASMAtomicReadS64, INT64_MAX);
902 TEST_READ(pi64, uint64_t, "%d", ASMAtomicReadS64, INT64_MIN);
903 TEST_READ(pi64, uint64_t, "%d", ASMAtomicReadS64, 42);
904 TEST_READ(pi64, uint64_t, "%d", ASMAtomicReadS64, -21);
905
906#if ARCH_BITS == 64
907 size_t volatile *pcb = (size_t volatile *)pu64;
908 TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, 0);
909 TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, ~(size_t)2);
910 TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, ~(size_t)0 / 4);
911
912 void * volatile *ppv = (void * volatile *)pu64;
913 TEST_READ(ppv, void *, "%p", ASMAtomicReadPtr, NULL);
914 TEST_READ(ppv, void *, "%p", ASMAtomicReadPtr, (void *)~(uintptr_t)42);
915
916 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
917 RTSEMEVENT hEvt = ASMAtomicReadPtrT(phEvt, RTSEMEVENT);
918 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
919
920 ASMAtomicReadHandle(phEvt, &hEvt);
921 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
922#endif
923}
924
925
926DECLINLINE(void) tstASMAtomicUoReadU64Worker(uint64_t volatile *pu64)
927{
928 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, 0);
929 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, 19983);
930 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, INT16_MAX);
931 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, UINT16_MAX);
932 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, _1M-1);
933 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, _1M+1);
934 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, _1G-1);
935 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, _1G+1);
936 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, INT32_MAX);
937 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, UINT32_MAX);
938 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, INT64_MAX);
939 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, UINT64_MAX);
940 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, UINT64_C(0x450872549687134));
941
942 int64_t volatile *pi64 = (int64_t volatile *)pu64;
943 TEST_READ(pi64, uint64_t, "%d", ASMAtomicUoReadS64, INT64_MAX);
944 TEST_READ(pi64, uint64_t, "%d", ASMAtomicUoReadS64, INT64_MIN);
945 TEST_READ(pi64, uint64_t, "%d", ASMAtomicUoReadS64, 42);
946 TEST_READ(pi64, uint64_t, "%d", ASMAtomicUoReadS64, -21);
947
948#if ARCH_BITS == 64
949 size_t volatile *pcb = (size_t volatile *)pu64;
950 TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, 0);
951 TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, ~(size_t)2);
952 TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, ~(size_t)0 / 4);
953
954 void * volatile *ppv = (void * volatile *)pu64;
955 TEST_READ(ppv, void *, "%p", ASMAtomicUoReadPtr, NULL);
956 TEST_READ(ppv, void *, "%p", ASMAtomicUoReadPtr, (void *)~(uintptr_t)42);
957
958 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
959 RTSEMEVENT hEvt = ASMAtomicUoReadPtrT(phEvt, RTSEMEVENT);
960 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
961
962 ASMAtomicUoReadHandle(phEvt, &hEvt);
963 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
964#endif
965}
966
967
/**
 * Drives the 8/16/32/64-bit atomic read tests, both the ordered and the
 * unordered (Uo) variants, through the DO_SIMPLE_TEST harness.
 */
static void tstASMAtomicRead(void)
{
    DO_SIMPLE_TEST(ASMAtomicReadU8, uint8_t);
    DO_SIMPLE_TEST(ASMAtomicUoReadU8, uint8_t);

    DO_SIMPLE_TEST(ASMAtomicReadU16, uint16_t);
    DO_SIMPLE_TEST(ASMAtomicUoReadU16, uint16_t);

    DO_SIMPLE_TEST(ASMAtomicReadU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicUoReadU32, uint32_t);

    DO_SIMPLE_TEST(ASMAtomicReadU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicUoReadU64, uint64_t);
}
982
983
/** Performs a_Function(a_pVar, a_Val) and checks that *a_pVar afterwards
 * equals a_Val, reporting mismatches with the a_Fmt format.
 * @note a_Type is not used by the expansion; kept for symmetry with
 *       TEST_READ. */
#define TEST_WRITE(a_pVar, a_Type, a_Fmt, a_Function, a_Val) \
    do { a_Function(a_pVar, a_Val); CHECKVAL(*a_pVar, a_Val, a_Fmt); } while (0)
986
/**
 * Tests ASMAtomicWriteU8, ASMAtomicWriteS8 and ASMAtomicWriteBool against the
 * same byte of memory.
 *
 * @param   pu8     Pointer to the byte to write thru.
 */
DECLINLINE(void) tstASMAtomicWriteU8Worker(uint8_t volatile *pu8)
{
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 0);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 1);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 2);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 16);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 32);
    /* NOTE(review): 32 is written twice; the second was possibly meant to be
       64 - confirm with the author. */
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 32);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 127);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 128);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 169);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 239);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 254);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 255);

    /* Same byte viewed as a signed value. */
    volatile int8_t *pi8 = (volatile int8_t *)pu8;
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicWriteS8, INT8_MIN);
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicWriteS8, INT8_MAX);
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicWriteS8, 42);
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicWriteS8, -41);

    /* Same byte viewed as a bool. */
    volatile bool *pf = (volatile bool *)pu8;
    TEST_WRITE(pf, bool, "%d", ASMAtomicWriteBool, true);
    TEST_WRITE(pf, bool, "%d", ASMAtomicWriteBool, false);
}
1012
1013
/**
 * Tests the unordered ASMAtomicUoWriteU8/S8/Bool variants against the same
 * byte of memory.
 *
 * @param   pu8     Pointer to the byte to write thru.
 */
DECLINLINE(void) tstASMAtomicUoWriteU8Worker(uint8_t volatile *pu8)
{
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 0);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 1);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 2);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 16);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 32);
    /* NOTE(review): 32 is written twice; the second was possibly meant to be
       64 - confirm with the author. */
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 32);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 127);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 128);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 169);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 239);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 254);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 255);

    /* Same byte viewed as a signed value. */
    volatile int8_t *pi8 = (volatile int8_t *)pu8;
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicUoWriteS8, INT8_MIN);
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicUoWriteS8, INT8_MAX);
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicUoWriteS8, 42);
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicUoWriteS8, -41);

    /* Same byte viewed as a bool. */
    volatile bool *pf = (volatile bool *)pu8;
    TEST_WRITE(pf, bool, "%d", ASMAtomicUoWriteBool, true);
    TEST_WRITE(pf, bool, "%d", ASMAtomicUoWriteBool, false);
}
1039
1040
/**
 * Tests ASMAtomicWriteU16 and ASMAtomicWriteS16 against the same 16-bit
 * variable.
 *
 * @param   pu16    Pointer to the variable to write thru.
 */
DECLINLINE(void) tstASMAtomicWriteU16Worker(uint16_t volatile *pu16)
{
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicWriteU16, 0);
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicWriteU16, 19983);
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicWriteU16, INT16_MAX);
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicWriteU16, UINT16_MAX);

    /* Same storage viewed as a signed value. */
    volatile int16_t *pi16 = (volatile int16_t *)pu16;
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicWriteS16, INT16_MIN);
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicWriteS16, INT16_MAX);
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicWriteS16, 42);
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicWriteS16, -41);
}
1054
1055
/**
 * Tests the unordered ASMAtomicUoWriteU16/S16 variants against the same
 * 16-bit variable.
 *
 * @param   pu16    Pointer to the variable to write thru.
 */
DECLINLINE(void) tstASMAtomicUoWriteU16Worker(uint16_t volatile *pu16)
{
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicUoWriteU16, 0);
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicUoWriteU16, 19983);
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicUoWriteU16, INT16_MAX);
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicUoWriteU16, UINT16_MAX);

    /* Same storage viewed as a signed value. */
    volatile int16_t *pi16 = (volatile int16_t *)pu16;
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicUoWriteS16, INT16_MIN);
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicUoWriteS16, INT16_MAX);
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicUoWriteS16, 42);
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicUoWriteS16, -41);
}
1069
1070
/**
 * Tests ASMAtomicWriteU32/S32 and, on 32-bit hosts, the size_t, pointer and
 * handle write wrappers that map onto 32-bit writes.
 *
 * @param   pu32    Pointer to the variable to write thru.
 */
DECLINLINE(void) tstASMAtomicWriteU32Worker(uint32_t volatile *pu32)
{
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, 0);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, 19983);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, INT16_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, UINT16_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, _1M-1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, _1M+1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, _1G-1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, _1G+1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, INT32_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, UINT32_MAX);

    /* Same storage viewed as a signed value. */
    volatile int32_t *pi32 = (volatile int32_t *)pu32;
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicWriteS32, INT32_MIN);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicWriteS32, INT32_MAX);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicWriteS32, 42);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicWriteS32, -41);

#if ARCH_BITS == 32
    /* On 32-bit hosts size_t/pointer/handle writes are 32-bit, so they can be
       exercised on the same storage. */
    size_t volatile *pcb = (size_t volatile *)pu32;
    TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicWriteZ, ~(size_t)42);
    TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicWriteZ, 42);

    void * volatile *ppv = (void * volatile *)pu32;
    TEST_WRITE(ppv, void *, "%#zx", ASMAtomicWritePtrVoid, NULL);
    TEST_WRITE(ppv, void *, "%#zx", ASMAtomicWritePtrVoid, (void *)~(uintptr_t)12938754);

    ASMAtomicWriteNullPtr(ppv); CHECKVAL(*ppv, NULL, "%p");
    ASMAtomicWritePtr(ppv, (void *)~(intptr_t)2322434); CHECKVAL(*ppv, (void *)~(intptr_t)2322434, "%p");

    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
    ASMAtomicWriteHandle(phEvt, (RTSEMEVENT)(uintptr_t)99753456); CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)99753456, "%p");
#endif
}
1106
1107
/**
 * Tests the unordered ASMAtomicUoWriteU32/S32 variants and, on 32-bit hosts,
 * the unordered size_t, pointer and handle write wrappers.
 *
 * @param   pu32    Pointer to the variable to write thru.
 */
DECLINLINE(void) tstASMAtomicUoWriteU32Worker(uint32_t volatile *pu32)
{
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, 0);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, 19983);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, INT16_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, UINT16_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, _1M-1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, _1M+1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, _1G-1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, _1G+1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, INT32_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, UINT32_MAX);

    /* Same storage viewed as a signed value. */
    volatile int32_t *pi32 = (volatile int32_t *)pu32;
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicUoWriteS32, INT32_MIN);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicUoWriteS32, INT32_MAX);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicUoWriteS32, 42);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicUoWriteS32, -41);

#if ARCH_BITS == 32
    /* On 32-bit hosts size_t/pointer/handle writes are 32-bit, so they can be
       exercised on the same storage. */
    size_t volatile *pcb = (size_t volatile *)pu32;
    TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicUoWriteZ, ~(size_t)42);
    TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicUoWriteZ, 42);

    void * volatile *ppv = (void * volatile *)pu32;
    TEST_WRITE(ppv, void *, "%#zx", ASMAtomicUoWritePtrVoid, NULL);
    TEST_WRITE(ppv, void *, "%#zx", ASMAtomicUoWritePtrVoid, (void *)~(uintptr_t)12938754);

    ASMAtomicUoWriteNullPtr(ppv); CHECKVAL(*ppv, NULL, "%p");
    ASMAtomicUoWritePtr(ppv, (void *)~(intptr_t)2322434); CHECKVAL(*ppv, (void *)~(intptr_t)2322434, "%p");

    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
    ASMAtomicUoWriteHandle(phEvt, (RTSEMEVENT)(uintptr_t)99753456); CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)99753456, "%p");
#endif
}
1143
1144
1145DECLINLINE(void) tstASMAtomicWriteU64Worker(uint64_t volatile *pu64)
1146{
1147 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, 0);
1148 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, 19983);
1149 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, INT16_MAX);
1150 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, UINT16_MAX);
1151 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, _1M-1);
1152 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, _1M+1);
1153 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, _1G-1);
1154 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, _1G+1);
1155 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, INT32_MAX);
1156 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, UINT32_MAX);
1157 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, INT64_MAX);
1158 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, UINT64_MAX);
1159 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, UINT64_C(0x450872549687134));
1160
1161 volatile int64_t *pi64 = (volatile int64_t *)pu64;
1162 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicWriteS64, INT64_MIN);
1163 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicWriteS64, INT64_MAX);
1164 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicWriteS64, 42);
1165
1166#if ARCH_BITS == 64
1167 size_t volatile *pcb = (size_t volatile *)pu64;
1168 TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicWriteZ, ~(size_t)42);
1169 TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicWriteZ, 42);
1170
1171 void * volatile *ppv = (void * volatile *)pu64;
1172 TEST_WRITE(ppv, void *, "%#zx", ASMAtomicWritePtrVoid, NULL);
1173 TEST_WRITE(ppv, void *, "%#zx", ASMAtomicWritePtrVoid, (void *)~(uintptr_t)12938754);
1174
1175 ASMAtomicWriteNullPtr(ppv); CHECKVAL(*ppv, NULL, "%p");
1176 ASMAtomicWritePtr(ppv, (void *)~(intptr_t)2322434); CHECKVAL(*ppv, (void *)~(intptr_t)2322434, "%p");
1177
1178 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
1179 ASMAtomicWriteHandle(phEvt, (RTSEMEVENT)(uintptr_t)99753456); CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)99753456, "%p");
1180#endif
1181}
1182
1183
1184DECLINLINE(void) tstASMAtomicUoWriteU64Worker(uint64_t volatile *pu64)
1185{
1186 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, 0);
1187 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, 19983);
1188 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, INT16_MAX);
1189 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, UINT16_MAX);
1190 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, _1M-1);
1191 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, _1M+1);
1192 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, _1G-1);
1193 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, _1G+1);
1194 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, INT32_MAX);
1195 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, UINT32_MAX);
1196 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, INT64_MAX);
1197 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, UINT64_MAX);
1198 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, UINT64_C(0x450872549687134));
1199
1200 volatile int64_t *pi64 = (volatile int64_t *)pu64;
1201 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicUoWriteS64, INT64_MIN);
1202 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicUoWriteS64, INT64_MAX);
1203 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicUoWriteS64, 42);
1204
1205#if ARCH_BITS == 64
1206 size_t volatile *pcb = (size_t volatile *)pu64;
1207 TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicUoWriteZ, ~(size_t)42);
1208 TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicUoWriteZ, 42);
1209
1210 void * volatile *ppv = (void * volatile *)pu64;
1211 TEST_WRITE(ppv, void *, "%#zx", ASMAtomicUoWritePtrVoid, NULL);
1212 TEST_WRITE(ppv, void *, "%#zx", ASMAtomicUoWritePtrVoid, (void *)~(uintptr_t)12938754);
1213
1214 ASMAtomicUoWriteNullPtr(ppv); CHECKVAL(*ppv, NULL, "%p");
1215 ASMAtomicUoWritePtr(ppv, (void *)~(intptr_t)2322434); CHECKVAL(*ppv, (void *)~(intptr_t)2322434, "%p");
1216
1217 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
1218 ASMAtomicUoWriteHandle(phEvt, (RTSEMEVENT)(uintptr_t)99753456); CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)99753456, "%p");
1219#endif
1220}
1221
/**
 * Drives the 8/16/32/64-bit atomic write tests, both the ordered and the
 * unordered (Uo) variants, through the DO_SIMPLE_TEST harness.
 */
static void tstASMAtomicWrite(void)
{
    DO_SIMPLE_TEST(ASMAtomicWriteU8, uint8_t);
    DO_SIMPLE_TEST(ASMAtomicUoWriteU8, uint8_t);

    DO_SIMPLE_TEST(ASMAtomicWriteU16, uint16_t);
    DO_SIMPLE_TEST(ASMAtomicUoWriteU16, uint16_t);

    DO_SIMPLE_TEST(ASMAtomicWriteU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicUoWriteU32, uint32_t);

    DO_SIMPLE_TEST(ASMAtomicWriteU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicUoWriteU64, uint64_t);
}
1236
1237
/**
 * Tests the 8-bit exchange operations (U8, S8, Bool) on the same byte.
 *
 * Each CHECK_OP_AND_VAL invocation passes the expected previous value and the
 * expected new variable value; the sequence is order dependent since each
 * exchange leaves the value the next line expects to find.
 *
 * @param   pu8     Pointer to the byte to exchange thru.
 */
DECLINLINE(void) tstASMAtomicXchgU8Worker(uint8_t volatile *pu8)
{
    *pu8 = 0;
    CHECK_OP_AND_VAL(uint8_t, "%#x", pu8, ASMAtomicXchgU8(pu8, 1), 0, 1);
    CHECK_OP_AND_VAL(uint8_t, "%#x", pu8, ASMAtomicXchgU8(pu8, UINT8_C(0xff)), 1, UINT8_C(0xff));
    CHECK_OP_AND_VAL(uint8_t, "%#x", pu8, ASMAtomicXchgU8(pu8, UINT8_C(0x87)), UINT8_C(0xff), UINT8_C(0x87));
    CHECK_OP_AND_VAL(uint8_t, "%#x", pu8, ASMAtomicXchgU8(pu8, UINT8_C(0xfe)), UINT8_C(0x87), UINT8_C(0xfe));

    /* Same byte as signed; 0xfe reads back as -2. */
    int8_t volatile *pi8 = (int8_t volatile *)pu8;
    CHECK_OP_AND_VAL(int8_t, "%d", pi8, ASMAtomicXchgS8(pi8, INT8_C(-4)), INT8_C(-2), INT8_C(-4));
    CHECK_OP_AND_VAL(int8_t, "%d", pi8, ASMAtomicXchgS8(pi8, INT8_C(4)), INT8_C(-4), INT8_C(4));
    CHECK_OP_AND_VAL(int8_t, "%d", pi8, ASMAtomicXchgS8(pi8, INT8_MAX), INT8_C(4), INT8_MAX);
    CHECK_OP_AND_VAL(int8_t, "%d", pi8, ASMAtomicXchgS8(pi8, INT8_MIN), INT8_MAX, INT8_MIN);
    CHECK_OP_AND_VAL(int8_t, "%d", pi8, ASMAtomicXchgS8(pi8, 1), INT8_MIN, 1);

    /* Same byte as bool; the trailing 1 above reads back as true. */
    bool volatile *pf = (bool volatile *)pu8;
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicXchgBool(pf, false), true, false);
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicXchgBool(pf, false), false, false);
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicXchgBool(pf, true), false, true);
}
1258
1259
/**
 * Tests the 16-bit exchange operations (U16, S16) on the same variable.
 *
 * Order dependent: each exchange leaves the value the next line expects as
 * the previous value.
 *
 * @param   pu16    Pointer to the variable to exchange thru.
 */
DECLINLINE(void) tstASMAtomicXchgU16Worker(uint16_t volatile *pu16)
{
    *pu16 = 0;
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, 1), 0, 1);
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, 0), 1, 0);
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, UINT16_MAX), 0, UINT16_MAX);
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, UINT16_C(0x7fff)), UINT16_MAX, UINT16_C(0x7fff));
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, UINT16_C(0x8765)), UINT16_C(0x7fff), UINT16_C(0x8765));
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, UINT16_C(0xfffe)), UINT16_C(0x8765), UINT16_C(0xfffe));

    /* Same storage as signed; 0xfffe reads back as -2. */
    int16_t volatile *pi16 = (int16_t volatile *)pu16;
    CHECK_OP_AND_VAL(int16_t, "%d", pi16, ASMAtomicXchgS16(pi16, INT16_MIN), INT16_C(-2), INT16_MIN);
    CHECK_OP_AND_VAL(int16_t, "%d", pi16, ASMAtomicXchgS16(pi16, INT16_MAX), INT16_MIN, INT16_MAX);
    CHECK_OP_AND_VAL(int16_t, "%d", pi16, ASMAtomicXchgS16(pi16, -8), INT16_MAX, -8);
    CHECK_OP_AND_VAL(int16_t, "%d", pi16, ASMAtomicXchgS16(pi16, 8), -8, 8);
}
1276
1277
1278DECLINLINE(void) tstASMAtomicXchgU32Worker(uint32_t volatile *pu32)
1279{
1280 *pu32 = 0;
1281 CHECK_OP_AND_VAL(uint32_t, "%#x", pu32, ASMAtomicXchgU32(pu32, 1), 0, 1);
1282 CHECK_OP_AND_VAL(uint32_t, "%#x", pu32, ASMAtomicXchgU32(pu32, 0), 1, 0);
1283 CHECK_OP_AND_VAL(uint32_t, "%#x", pu32, ASMAtomicXchgU32(pu32, UINT32_MAX), 0, UINT32_MAX);
1284 CHECK_OP_AND_VAL(uint32_t, "%#x", pu32, ASMAtomicXchgU32(pu32, UINT32_C(0x87654321)), UINT32_MAX, UINT32_C(0x87654321));
1285 CHECK_OP_AND_VAL(uint32_t, "%#x", pu32, ASMAtomicXchgU32(pu32, UINT32_C(0xfffffffe)), UINT32_C(0x87654321), UINT32_C(0xfffffffe));
1286
1287 int32_t volatile *pi32 = (int32_t volatile *)pu32;
1288 CHECK_OP_AND_VAL(int32_t, "%d", pi32, ASMAtomicXchgS32(pi32, INT32_MIN), INT32_C(-2), INT32_MIN);
1289 CHECK_OP_AND_VAL(int32_t, "%d", pi32, ASMAtomicXchgS32(pi32, INT32_MAX), INT32_MIN, INT32_MAX);
1290 CHECK_OP_AND_VAL(int32_t, "%d", pi32, ASMAtomicXchgS32(pi32, -16), INT32_MAX, -16);
1291 CHECK_OP_AND_VAL(int32_t, "%d", pi32, ASMAtomicXchgS32(pi32, 16), -16, 16);
1292
1293#if ARCH_BITS == 32
1294 size_t volatile *pcb = (size_t volatile *)pu32;
1295 CHECK_OP_AND_VAL(size_t, "%#zx", pcb, ASMAtomicXchgZ(pcb, UINT32_C(0x9481239b)), 0x10, UINT32_C(0x9481239b));
1296 CHECK_OP_AND_VAL(size_t, "%#zx", pcb, ASMAtomicXchgZ(pcb, UINT32_C(0xcdef1234)), UINT32_C(0x9481239b), UINT32_C(0xcdef1234));
1297#endif
1298
1299#if R0_ARCH_BITS == 32
1300 RTR0PTR volatile *pR0Ptr = (RTR0PTR volatile *)pu32;
1301 CHECK_OP_AND_VAL(size_t, "%#llx", pcb, ASMAtomicXchgR0Ptr(pR0Ptr, UINT32_C(0x80341237)), UINT32_C(0xcdef1234), UINT32_C(0x80341237));
1302#endif
1303}
1304
1305
1306DECLINLINE(void) tstASMAtomicXchgU64Worker(uint64_t volatile *pu64)
1307{
1308 *pu64 = 0;
1309 CHECK_OP_AND_VAL(uint64_t, "%#llx", pu64, ASMAtomicXchgU64(pu64, 1), 0, 1);
1310 CHECK_OP_AND_VAL(uint64_t, "%#llx", pu64, ASMAtomicXchgU64(pu64, 0), 1, 0);
1311 CHECK_OP_AND_VAL(uint64_t, "%#llx", pu64, ASMAtomicXchgU64(pu64, UINT64_MAX), 0, UINT64_MAX);
1312 CHECK_OP_AND_VAL(uint64_t, "%#llx", pu64, ASMAtomicXchgU64(pu64, UINT64_C(0xfedcba0987654321)), UINT64_MAX, UINT64_C(0xfedcba0987654321));
1313 CHECK_OP_AND_VAL(uint64_t, "%#llx", pu64, ASMAtomicXchgU64(pu64, UINT64_C(0xfffffffffffffffe)), UINT64_C(0xfedcba0987654321), UINT64_C(0xfffffffffffffffe));
1314
1315 int64_t volatile *pi64 = (int64_t volatile *)pu64;
1316 CHECK_OP_AND_VAL(int64_t, "%lld", pi64, ASMAtomicXchgS64(pi64, INT64_MAX), -2, INT64_MAX);
1317 CHECK_OP_AND_VAL(int64_t, "%lld", pi64, ASMAtomicXchgS64(pi64, INT64_MIN), INT64_MAX, INT64_MIN);
1318 CHECK_OP_AND_VAL(int64_t, "%lld", pi64, ASMAtomicXchgS64(pi64, -32), INT64_MIN, -32);
1319 CHECK_OP_AND_VAL(int64_t, "%lld", pi64, ASMAtomicXchgS64(pi64, 32), -32, 32);
1320
1321#if ARCH_BITS == 64
1322 size_t volatile *pcb = (size_t volatile *)pu64;
1323 CHECK_OP_AND_VAL(size_t, "%#zx", pcb, ASMAtomicXchgZ(pcb, UINT64_C(0x94812396759)), 0x20, UINT64_C(0x94812396759));
1324 CHECK_OP_AND_VAL(size_t, "%#zx", pcb, ASMAtomicXchgZ(pcb, UINT64_C(0xcdef1234abdf7896)), UINT64_C(0x94812396759), UINT64_C(0xcdef1234abdf7896));
1325#endif
1326
1327#if R0_ARCH_BITS == 64
1328 RTR0PTR volatile *pR0Ptr = (RTR0PTR volatile *)pu64;
1329 CHECK_OP_AND_VAL(size_t, "%#llx", pcb, ASMAtomicXchgR0Ptr(pR0Ptr, UINT64_C(0xfedc1234567890ab)), UINT64_C(0xcdef1234abdf7896), UINT64_C(0xfedc1234567890ab));
1330#endif
1331}
1332
1333
/**
 * Tests the pointer exchange operations (void pointer, ring-3 pointer and
 * handle variants) on the same pointer-sized variable.
 *
 * Order dependent: each exchange leaves the value the next line expects as
 * the previous value.
 *
 * @param   ppv     Pointer to the pointer variable to exchange thru.
 */
DECLINLINE(void) tstASMAtomicXchgPtrWorker(void * volatile *ppv)
{
    *ppv = NULL;
    CHECK_OP_AND_VAL(void *, "%p", ppv, ASMAtomicXchgPtr(ppv, (void *)(~(uintptr_t)0)), NULL, (void *)(~(uintptr_t)0));
    CHECK_OP_AND_VAL(void *, "%p", ppv, ASMAtomicXchgPtr(ppv, (void *)(~(uintptr_t)0x87654321)), (void *)(~(uintptr_t)0), (void *)(~(uintptr_t)0x87654321));
    CHECK_OP_AND_VAL(void *, "%p", ppv, ASMAtomicXchgPtr(ppv, NULL), (void *)(~(uintptr_t)0x87654321), NULL);

    CHECK_OP_AND_VAL(void *, "%p", ppv, ASMAtomicXchgR3Ptr(ppv, (void *)ppv), NULL, (void *)ppv);

    /* ASMAtomicXchgHandle returns the old value thru its third argument. */
    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)ppv;
    RTSEMEVENT hRet;
    ASMAtomicXchgHandle(phEvt, (RTSEMEVENT)(~(uintptr_t)12345), &hRet);
    CHECKVAL(hRet, (RTSEMEVENT)ppv, "%p");
    CHECKVAL(*phEvt, (RTSEMEVENT)(~(uintptr_t)12345), "%p");
}
1349
1350
/**
 * Drives the 8/16/32/64-bit and pointer atomic exchange tests through the
 * DO_SIMPLE_TEST harness.
 */
static void tstASMAtomicXchg(void)
{
    DO_SIMPLE_TEST(ASMAtomicXchgU8, uint8_t);
    DO_SIMPLE_TEST(ASMAtomicXchgU16, uint16_t);
    DO_SIMPLE_TEST(ASMAtomicXchgU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicXchgU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicXchgPtr, void *);
}
1359
1360
/**
 * Tests the 8-bit compare-and-exchange operations (U8, S8, Bool) on the same
 * byte, covering both successful and failing compares.
 *
 * Order dependent: each line's compare value matches (or deliberately
 * mismatches) the value the previous line left behind.
 *
 * @param   pu8     Pointer to the byte to operate on.
 */
DECLINLINE(void) tstASMAtomicCmpXchgU8Worker(uint8_t volatile *pu8)
{
    *pu8 = 0xff;
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu8, ASMAtomicCmpXchgU8(pu8, 0, 0), false, 0xff);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu8, ASMAtomicCmpXchgU8(pu8, 0, 0xff), true, 0);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu8, ASMAtomicCmpXchgU8(pu8, 0x97, 0), true, 0x97);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu8, ASMAtomicCmpXchgU8(pu8, 0x97, 0), false, 0x97);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu8, ASMAtomicCmpXchgU8(pu8, 0x7f, 0x97), true, 0x7f);

    /* Same byte as signed; 0x7f above is INT8_MAX here. */
    int8_t volatile *pi8 = (int8_t volatile *)pu8;
    CHECK_OP_AND_VAL(bool, "%d", pi8, ASMAtomicCmpXchgS8(pi8, -2, 0x7f), true, -2);
    CHECK_OP_AND_VAL(bool, "%d", pi8, ASMAtomicCmpXchgS8(pi8, INT8_MAX, -2), true, INT8_MAX);
    CHECK_OP_AND_VAL(bool, "%d", pi8, ASMAtomicCmpXchgS8(pi8, INT8_MAX, INT8_MIN), false, INT8_MAX);
    CHECK_OP_AND_VAL(bool, "%d", pi8, ASMAtomicCmpXchgS8(pi8, INT8_MIN, INT8_MAX), true, INT8_MIN);
    CHECK_OP_AND_VAL(bool, "%d", pi8, ASMAtomicCmpXchgS8(pi8, 1, INT8_MIN), true, 1);

    /* Same byte as bool; the trailing 1 above reads back as true. */
    bool volatile *pf = (bool volatile *)pu8;
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicCmpXchgBool(pf, true, true), true, true);
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicCmpXchgBool(pf, false, true), true, false);
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicCmpXchgBool(pf, false, true), false, false);
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicCmpXchgBool(pf, false, false), true, false);
}
1383
1384
1385DECLINLINE(void) tstASMAtomicCmpXchgU32Worker(uint32_t volatile *pu32)
1386{
1387 *pu32 = UINT32_C(0xffffffff);
1388 CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu32, ASMAtomicCmpXchgU32(pu32, 0, 0), false, UINT32_C(0xffffffff));
1389 CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu32, ASMAtomicCmpXchgU32(pu32, 0, UINT32_C(0xffffffff)), true, 0);
1390 CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu32, ASMAtomicCmpXchgU32(pu32, UINT32_C(0x80088efd), UINT32_C(0x12345678)), false, 0);
1391 CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu32, ASMAtomicCmpXchgU32(pu32, UINT32_C(0x80088efd), 0), true, UINT32_C(0x80088efd));
1392 CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu32, ASMAtomicCmpXchgU32(pu32, UINT32_C(0xfffffffe), UINT32_C(0x80088efd)), true, UINT32_C(0xfffffffe));
1393
1394 int32_t volatile *pi32 = (int32_t volatile *)pu32;
1395 CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, INT32_MIN, 2), false, -2);
1396 CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, INT32_MIN, -2), true, INT32_MIN);
1397 CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, -19, -2), false, INT32_MIN);
1398 CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, -19, INT32_MIN), true, -19);
1399 CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, -19, INT32_MIN), false, -19);
1400 CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, 19, -19), true, 19);
1401 CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, INT32_MAX, -234), false, 19);
1402 CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, INT32_MAX, 19), true, INT32_MAX);
1403
1404#if ARCH_BITS == 32
1405 void * volatile *ppv = (void * volatile *)pu32;
1406 CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)-29), false, (void *)(intptr_t)29);
1407 CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)29), true, NULL);
1408 CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)29), false, NULL);
1409 CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, (void *)~(uintptr_t)42, NULL), true, (void *)~(uintptr_t)42);
1410
1411 bool fRc;
1412 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
1413 ASMAtomicCmpXchgHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, (RTSEMEVENT)NULL, fRc);
1414 CHECKVAL(fRc, false, "%d");
1415 CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
1416
1417 ASMAtomicCmpXchgHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, (RTSEMEVENT)~(uintptr_t)42, fRc);
1418 CHECKVAL(fRc, true, "%d");
1419 CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, "%p");
1420#endif
1421}
1422
1423
/**
 * Worker for the 64-bit compare-and-exchange tests.
 *
 * Exercises ASMAtomicCmpXchgU64/S64 and, on 64-bit hosts, the pointer void
 * and handle variants aliasing the same storage.  Each CHECK_OP_AND_VAL_EX
 * line verifies both the success indicator and the resulting variable value;
 * the expected values chain from one statement to the next, so statement
 * order is significant.
 *
 * @param   pu64    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicCmpXchgU64Worker(uint64_t volatile *pu64)
{
    /* NOTE(review): 14 'f' digits here (== 0x00ffffffffffffff), not 16.  The
       expectations below are consistent with this value, so the test works
       either way - confirm whether 16 digits were intended. */
    *pu64 = UINT64_C(0xffffffffffffff);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, 0, 0), false, UINT64_C(0xffffffffffffff));
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, 0, UINT64_C(0xffffffffffffff)), true, 0);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), 1), false, 0);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), 0), true, UINT64_C(0x80040008008efd));
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), 0), false, UINT64_C(0x80040008008efd));
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, UINT64_C(0xfffffffffffffffd), UINT64_C(0x80040008008efd)), true, UINT64_C(0xfffffffffffffffd));

    /* Same storage viewed as signed; 0xfffffffffffffffd == -3. */
    int64_t volatile *pi64 = (int64_t volatile *)pu64;
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, INT64_MAX, 0), false, -3);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, INT64_MAX, -3), true, INT64_MAX);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, INT64_MIN, INT64_MIN), false, INT64_MAX);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, INT64_MIN, INT64_MAX), true, INT64_MIN);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, -29, -29), false, INT64_MIN);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, -29, INT64_MIN), true, -29);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, -29, INT64_MIN), false, -29);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, 29, -29), true, 29);

#if ARCH_BITS == 64
    /* Pointer-sized variants only fit in the 64-bit variable on 64-bit hosts. */
    void * volatile *ppv = (void * volatile *)pu64;
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)-29), false, (void *)(intptr_t)29);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)29), true, NULL);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)29), false, NULL);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, (void *)~(uintptr_t)42, NULL), true, (void *)~(uintptr_t)42);

    bool fRc;
    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
    ASMAtomicCmpXchgHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, (RTSEMEVENT)NULL, fRc);
    CHECKVAL(fRc, false, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");

    ASMAtomicCmpXchgHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, (RTSEMEVENT)~(uintptr_t)42, fRc);
    CHECKVAL(fRc, true, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, "%p");
#endif
}
1462
1463
/**
 * Drives the compare-and-exchange workers via the DO_SIMPLE_TEST machinery
 * (defined earlier in this file).
 */
static void tstASMAtomicCmpXchg(void)
{
    DO_SIMPLE_TEST(ASMAtomicCmpXchgU8, uint8_t);
    DO_SIMPLE_TEST(ASMAtomicCmpXchgU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicCmpXchgU64, uint64_t);
}
1470
1471
/**
 * Worker for the 32-bit compare-and-exchange-with-old-value tests.
 *
 * Exercises ASMAtomicCmpXchgExU32/S32 (and the handle variant on 32-bit
 * hosts).  CHECK_OP_AND_VAL_EX2 checks the success indicator, the resulting
 * variable value AND the returned old value; expectations chain from one
 * statement to the next, so statement order is significant.
 *
 * @param   pu32    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicCmpXchgExU32Worker(uint32_t volatile *pu32)
{
    *pu32 = UINT32_C(0xffffffff);
    uint32_t u32Old = UINT32_C(0x80005111);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, 0, 0, &u32Old), false, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, 0, UINT32_C(0xffffffff), &u32Old), true, 0, UINT32_C(0xffffffff));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, 0, UINT32_C(0xffffffff), &u32Old), false, 0, UINT32_C(0x00000000));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, UINT32_C(0x80088efd), 0, &u32Old), true, UINT32_C(0x80088efd), 0);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, UINT32_C(0x80088efd), 0, &u32Old), false, UINT32_C(0x80088efd), UINT32_C(0x80088efd));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, UINT32_C(0xffffffe0), UINT32_C(0x80088efd), &u32Old), true, UINT32_C(0xffffffe0), UINT32_C(0x80088efd));

    /* Same storage viewed as signed; 0xffffffe0 == -32. */
    int32_t volatile *pi32 = (int32_t volatile *)pu32;
    int32_t i32Old = 0;
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, 32, 32, &i32Old), false, -32, -32);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, 32, -32, &i32Old), true, 32, -32);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, INT32_MIN, 32, &i32Old), true, INT32_MIN, 32);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, INT32_MIN, 32, &i32Old), false, INT32_MIN, INT32_MIN);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, INT32_MAX, INT32_MAX, &i32Old), false, INT32_MIN, INT32_MIN);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, INT32_MAX, INT32_MIN, &i32Old), true, INT32_MAX, INT32_MIN);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, 42, INT32_MAX, &i32Old), true, 42, INT32_MAX);

#if ARCH_BITS == 32
    /* Handle variant only fits in the 32-bit variable on 32-bit hosts;
       the variable currently holds 42 from the last line above. */
    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
    RTSEMEVENT hEvtOld = (RTSEMEVENT)~(uintptr_t)31;
    bool fRc = true;
    ASMAtomicCmpXchgExHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, (RTSEMEVENT)~(uintptr_t)0, fRc, &hEvtOld);
    CHECKVAL(fRc, false, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)42, "%p");
    CHECKVAL(hEvtOld, (RTSEMEVENT)(uintptr_t)42, "%p");

    ASMAtomicCmpXchgExHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, (RTSEMEVENT)(uintptr_t)42, fRc, &hEvtOld);
    CHECKVAL(fRc, true, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, "%p");
    CHECKVAL(hEvtOld, (RTSEMEVENT)(uintptr_t)42, "%p");
#endif
}
1508
1509
/**
 * Worker for the 64-bit compare-and-exchange-with-old-value tests.
 *
 * Exercises ASMAtomicCmpXchgExU64/S64 and, on 64-bit hosts, the handle and
 * pointer variants aliasing the same storage.  CHECK_OP_AND_VAL_EX2 checks
 * the success indicator, the resulting variable value AND the returned old
 * value; expectations chain from one statement to the next, so statement
 * order is significant.
 *
 * @param   pu64    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicCmpXchgExU64Worker(uint64_t volatile *pu64)
{
    *pu64 = UINT64_C(0xffffffffffffffff);
    uint64_t u64Old = UINT64_C(0x8000000051111111);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#llx", pu64, u64Old, ASMAtomicCmpXchgExU64(pu64, 0, 0, &u64Old), false, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#llx", pu64, u64Old, ASMAtomicCmpXchgExU64(pu64, 0, UINT64_C(0xffffffffffffffff), &u64Old), true, 0, UINT64_C(0xffffffffffffffff));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#llx", pu64, u64Old, ASMAtomicCmpXchgExU64(pu64, UINT64_C(0x0080040008008efd), 0x342, &u64Old), false, 0, 0);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#llx", pu64, u64Old, ASMAtomicCmpXchgExU64(pu64, UINT64_C(0x0080040008008efd), 0, &u64Old), true, UINT64_C(0x0080040008008efd), 0);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#llx", pu64, u64Old, ASMAtomicCmpXchgExU64(pu64, UINT64_C(0xffffffffffffffc0), UINT64_C(0x0080040008008efd), &u64Old), true, UINT64_C(0xffffffffffffffc0), UINT64_C(0x0080040008008efd));

    /* Same storage viewed as signed; 0xffffffffffffffc0 == -64. */
    int64_t volatile *pi64 = (int64_t volatile *)pu64;
    int64_t i64Old = -3;
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, 64, 64, &i64Old), false, -64, -64);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, 64, -64, &i64Old), true, 64, -64);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, 64, -64, &i64Old), false, 64, 64);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, INT64_MIN, -64, &i64Old), false, 64, 64);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, INT64_MIN, 64, &i64Old), true, INT64_MIN, 64);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, INT64_MAX, INT64_MIN, &i64Old), true, INT64_MAX, INT64_MIN);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, 42, INT64_MAX, &i64Old), true, 42, INT64_MAX);

#if ARCH_BITS == 64
    /* Handle and pointer variants only fit in the 64-bit variable on
       64-bit hosts; the variable currently holds 42 from the line above. */
    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
    RTSEMEVENT hEvtOld = (RTSEMEVENT)~(uintptr_t)31;
    bool fRc = true;
    ASMAtomicCmpXchgExHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, (RTSEMEVENT)~(uintptr_t)0, fRc, &hEvtOld);
    CHECKVAL(fRc, false, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)42, "%p");
    CHECKVAL(hEvtOld, (RTSEMEVENT)(uintptr_t)42, "%p");

    ASMAtomicCmpXchgExHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, (RTSEMEVENT)(uintptr_t)42, fRc, &hEvtOld);
    CHECKVAL(fRc, true, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, "%p");
    CHECKVAL(hEvtOld, (RTSEMEVENT)(uintptr_t)42, "%p");

    void * volatile *ppv = (void * volatile *)pu64;
    void *pvOld;
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%p", ppv, pvOld, ASMAtomicCmpXchgExPtrVoid(ppv, (void *)(intptr_t)12345678, NULL, &pvOld), false, (void *)~(uintptr_t)0x12380964, (void *)~(uintptr_t)0x12380964);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%p", ppv, pvOld, ASMAtomicCmpXchgExPtrVoid(ppv, (void *)(intptr_t)12345678, (void *)~(uintptr_t)0x12380964, &pvOld), true, (void *)(intptr_t)12345678, (void *)~(uintptr_t)0x12380964);

    CHECK_OP_AND_VAL_EX2(bool, "%d", "%p", ppv, pvOld, ASMAtomicCmpXchgExPtr(ppv, (void *)~(uintptr_t)99, (void *)~(uintptr_t)99, &pvOld), false, (void *)(intptr_t)12345678, (void *)(intptr_t)12345678);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%p", ppv, pvOld, ASMAtomicCmpXchgExPtr(ppv, (void *)~(uintptr_t)99, (void *)(intptr_t)12345678, &pvOld), true, (void *)~(intptr_t)99, (void *)(intptr_t)12345678);
#endif
}
1553
1554
/**
 * Drives the compare-and-exchange-with-old-value workers via the
 * DO_SIMPLE_TEST machinery (no U8/U16 Ex variants are tested here).
 */
static void tstASMAtomicCmpXchgEx(void)
{
    DO_SIMPLE_TEST(ASMAtomicCmpXchgExU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicCmpXchgExU64, uint64_t);
}
1560
1561
/** Tests a function returning the previous ("old") value of the variable:
 * invokes a_Function(a_pVar, a_uVal) and checks both that the returned old
 * value equals what *a_pVar held before the call and that *a_pVar now holds
 * a_VarExpect, reporting a test failure otherwise.  a_Fmt must match
 * a_Type's printf conversion. */
#define TEST_RET_OLD(a_Type, a_Fmt, a_pVar, a_Function, a_uVal, a_VarExpect) do { \
        a_Type const uOldExpect = *(a_pVar); \
        a_Type uOldRet = a_Function(a_pVar, a_uVal); \
        if (RT_LIKELY( uOldRet == (uOldExpect) && *(a_pVar) == (a_VarExpect) )) { } \
        else RTTestFailed(g_hTest, "%s, %d: FAILURE: %s(%s," a_Fmt ") -> " a_Fmt ", expected " a_Fmt "; %s=" a_Fmt ", expected " a_Fmt "\n", \
                          __FUNCTION__, __LINE__, #a_Function, #a_pVar, a_uVal, uOldRet, uOldExpect, #a_pVar, *(a_pVar), (a_VarExpect)); \
    } while (0)
1569
1570
/**
 * Worker for the unsigned 32-bit atomic add/subtract tests
 * (ASMAtomicAddU32/SubU32), including wrap-around cases.  The expected
 * values chain from line to line, so statement order is significant.
 *
 * @param   pu32    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicAddU32Worker(uint32_t *pu32)
{
    *pu32 = 10;
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, 1, 11);
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, UINT32_C(0xfffffffe), 9); /* adding -2 via wrap-around */
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, UINT32_C(0xfffffff7), 0);
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, UINT32_C(0x7fffffff), UINT32_C(0x7fffffff));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, 1, UINT32_C(0x80000000));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, 1, UINT32_C(0x80000001));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, UINT32_C(0x7fffffff), 0);
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, 0, 0);

    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicSubU32, 0, 0);
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicSubU32, 32, UINT32_C(0xffffffe0));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicSubU32, UINT32_C(0x7fffffff), UINT32_C(0x7fffffe1));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicSubU32, UINT32_C(0x7fffffde), UINT32_C(0x00000003));
}
1588
1589
/**
 * Worker for the signed 32-bit atomic add/subtract tests
 * (ASMAtomicAddS32/SubS32).  The expected values chain from line to line,
 * so statement order is significant.
 *
 * @param   pi32    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicAddS32Worker(int32_t *pi32)
{
    *pi32 = 10;
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, 1, 11);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, -2, 9);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, -9, 0);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, -0x7fffffff, -0x7fffffff);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, 0, -0x7fffffff);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, 0x7fffffff, 0);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, 0, 0);

    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicSubS32, 0, 0);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicSubS32, 1, -1);
    /* -1 - INT32_MIN wraps to INT32_MAX; the atomic op is expected to wrap. */
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicSubS32, INT32_MIN, INT32_MAX);
}
1605
1606
/**
 * Worker for the unsigned 64-bit atomic add/subtract tests
 * (ASMAtomicAddU64/SubU64), including wrap-around cases.  The expected
 * values chain from line to line, so statement order is significant.
 *
 * @param   pu64    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicAddU64Worker(uint64_t volatile *pu64)
{
    *pu64 = 10;
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, 1, 11);
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0xfffffffffffffffe), UINT64_C(0x0000000000000009)); /* adding -2 via wrap-around */
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0xfffffffffffffff7), UINT64_C(0x0000000000000000));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0x7ffffffffffffff0), UINT64_C(0x7ffffffffffffff0));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0x7ffffffffffffff0), UINT64_C(0xffffffffffffffe0));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0x0000000000000000), UINT64_C(0xffffffffffffffe0));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0x000000000000001f), UINT64_C(0xffffffffffffffff));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0x0000000000000001), UINT64_C(0x0000000000000000));

    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicSubU64, UINT64_C(0x0000000000000000), UINT64_C(0x0000000000000000));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicSubU64, UINT64_C(0x0000000000000020), UINT64_C(0xffffffffffffffe0));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicSubU64, UINT64_C(0x7fffffffffffffff), UINT64_C(0x7fffffffffffffe1));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicSubU64, UINT64_C(0x7fffffffffffffdd), UINT64_C(0x0000000000000004));
}
1624
1625
1626DECLINLINE(void) tstASMAtomicAddS64Worker(int64_t volatile *pi64)
1627{
1628 *pi64 = 10;
1629 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, 1, 11);
1630 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, -2, 9);
1631 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, -9, 0);
1632 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, -INT64_MAX, -INT64_MAX);
1633 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, 0, -INT64_MAX);
1634 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, -1, INT64_MIN);
1635 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, INT64_MAX, -1);
1636 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, 1, 0);
1637 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, 0, 0);
1638
1639 TEST_RET_OLD(int64_t, "%d", pi64, ASMAtomicSubS64, 0, 0);
1640 TEST_RET_OLD(int64_t, "%d", pi64, ASMAtomicSubS64, 1, -1);
1641 TEST_RET_OLD(int64_t, "%d", pi64, ASMAtomicSubS64, INT64_MIN, INT64_MAX);
1642}
1643
1644
1645
/**
 * Worker for the size_t atomic add/subtract tests (ASMAtomicAddZ/SubZ).
 * The expected values chain from line to line; ~(size_t)1 and ~(size_t)8
 * act as -2 and -9 via unsigned wrap-around.
 *
 * @param   pcb     The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicAddZWorker(size_t volatile *pcb)
{
    *pcb = 10;
    TEST_RET_OLD(size_t, "%zx", pcb, ASMAtomicAddZ, 1, 11);
    TEST_RET_OLD(size_t, "%zx", pcb, ASMAtomicAddZ, ~(size_t)1, 9);
    TEST_RET_OLD(size_t, "%zx", pcb, ASMAtomicAddZ, ~(size_t)8, 0);

    TEST_RET_OLD(size_t, "%zx", pcb, ASMAtomicSubZ, 0, 0);
    TEST_RET_OLD(size_t, "%zx", pcb, ASMAtomicSubZ, 10, ~(size_t)9);
}
1656
/**
 * Drives the atomic add/subtract workers via the DO_SIMPLE_TEST machinery.
 */
static void tstASMAtomicAdd(void)
{
    DO_SIMPLE_TEST(ASMAtomicAddU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicAddS32, int32_t);
    DO_SIMPLE_TEST(ASMAtomicAddU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicAddS64, int64_t);
    DO_SIMPLE_TEST(ASMAtomicAddZ, size_t);
}
1665
1666
/** Tests a no-argument function returning the new value of the variable
 * (NV = no value parameter, e.g. inc/dec): invokes a_Function(a_pVar) and
 * checks that both the return value and *a_pVar equal a_VarExpect,
 * reporting a test failure otherwise. */
#define TEST_RET_NEW_NV(a_Type, a_Fmt, a_pVar, a_Function, a_VarExpect) do { \
        a_Type uNewRet = a_Function(a_pVar); \
        if (RT_LIKELY( uNewRet == (a_VarExpect) && *(a_pVar) == (a_VarExpect) )) { } \
        else RTTestFailed(g_hTest, "%s, %d: FAILURE: %s(%s) -> " a_Fmt " and %s=" a_Fmt ", expected both " a_Fmt "\n", \
                          __FUNCTION__, __LINE__, #a_Function, #a_pVar, uNewRet, #a_pVar, *(a_pVar), (a_VarExpect)); \
    } while (0)
1673
1674
/**
 * Worker for the unsigned 32-bit atomic increment/decrement tests
 * (ASMAtomicIncU32/DecU32), including wrap-around through zero in both
 * directions.  The expected values chain from line to line.
 *
 * @param   pu32    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicDecIncU32Worker(uint32_t volatile *pu32)
{
    *pu32 = 3;
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, 0);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, UINT32_MAX); /* wraps below zero */
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, UINT32_MAX - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, UINT32_MAX - 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, UINT32_MAX - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, UINT32_MAX);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, 0); /* wraps above UINT32_MAX */
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, 2);
    *pu32 = _1M;
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, _1M - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, _1M);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, _1M + 1);
}
1696
/**
 * Worker for the unordered (Uo) unsigned 32-bit increment/decrement tests
 * (ASMAtomicUoIncU32/UoDecU32).  Mirrors tstASMAtomicDecIncU32Worker but
 * with the unordered variants; the expected values chain from line to line.
 *
 * @param   pu32    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicUoDecIncU32Worker(uint32_t volatile *pu32)
{
    *pu32 = 3;
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, 0);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, UINT32_MAX); /* wraps below zero */
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, UINT32_MAX - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, UINT32_MAX - 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, UINT32_MAX - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, UINT32_MAX);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, 0); /* wraps above UINT32_MAX */
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, 2);
    *pu32 = _1M;
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, _1M - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, _1M);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, _1M + 1);
}
1718
1719
/**
 * Worker for the signed 32-bit atomic increment/decrement tests
 * (ASMAtomicIncS32/DecS32), covering the zero crossing and wrap-around at
 * INT32_MAX.  The expected values chain from line to line.
 *
 * @param   pi32    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicDecIncS32Worker(int32_t volatile *pi32)
{
    *pi32 = 10;
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 9);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 8);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 7);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 6);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 5);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 4);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 3);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 0);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, -1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, -2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, -1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 0);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 3);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 3);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 3);
    *pi32 = INT32_MAX;
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, INT32_MAX - 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, INT32_MAX);
    /* Two's complement wrap-around at INT32_MAX is the expected behavior. */
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, INT32_MIN);
}
1749
1750
/* Disabled, presumably because ASMAtomicUoIncS32/UoDecS32 are not (yet)
   available - matches the commented-out DO_SIMPLE_TEST invocation in
   tstASMAtomicDecInc().  TODO confirm and enable or remove. */
#if 0
/**
 * Worker for the unordered (Uo) signed 32-bit increment/decrement tests;
 * mirrors tstASMAtomicDecIncS32Worker with the unordered variants.
 *
 * @param   pi32    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicUoDecIncS32Worker(int32_t volatile *pi32)
{
    *pi32 = 10;
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 9);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 8);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 7);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 6);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 5);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 4);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 3);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 0);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, -1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, -2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, -1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 0);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 3);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 3);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 3);
    *pi32 = INT32_MAX;
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, INT32_MAX - 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, INT32_MAX);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, INT32_MIN);
}
#endif
1782
1783
1784DECLINLINE(void) tstASMAtomicDecIncU64Worker(uint64_t volatile *pu64)
1785{
1786 *pu64 = 3;
1787 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, 2);
1788 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, 1);
1789 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, 0);
1790 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, UINT64_MAX);
1791 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, UINT64_MAX - 1);
1792 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, UINT64_MAX - 2);
1793 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, UINT64_MAX - 1);
1794 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, UINT64_MAX);
1795 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, 0);
1796 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, 1);
1797 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, 2);
1798 *pu64 = _4G - 1;
1799 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, _4G - 2);
1800 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, _4G - 1);
1801 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, _4G);
1802 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, _4G + 1);
1803 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, _4G);
1804}
1805
1806
/* Disabled, presumably because ASMAtomicUoIncU64/UoDecU64 are not (yet)
   available - matches the commented-out DO_SIMPLE_TEST invocation in
   tstASMAtomicDecInc().  TODO confirm and enable or remove.
   NOTE(review): "%lld" mismatches the uint64_t arguments; change to "%llx"
   when enabling. */
#if 0
/**
 * Worker for the unordered (Uo) unsigned 64-bit increment/decrement tests;
 * mirrors tstASMAtomicDecIncU64Worker with the unordered variants.
 *
 * @param   pu64    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicUoDecIncU64Worker(uint64_t volatile *pu64)
{
    *pu64 = 3;
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, 2);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, 0);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, UINT64_MAX);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, UINT64_MAX - 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, UINT64_MAX - 2);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, UINT64_MAX - 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, UINT64_MAX);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, 0);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, 2);
    *pu64 = _4G - 1;
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, _4G - 2);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, _4G - 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, _4G);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, _4G + 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, _4G);
}
#endif
1830
1831
/**
 * Worker for the signed 64-bit atomic increment/decrement tests
 * (ASMAtomicIncS64/DecS64), covering the zero crossing.  The expected
 * values chain from line to line.
 *
 * @param   pi64    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicDecIncS64Worker(int64_t volatile *pi64)
{
    *pi64 = 10;
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 9);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 8);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 7);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 6);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 5);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 4);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 3);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 0);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, -1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, -2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, -1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 0);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 3);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 3);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 3);
    *pi64 = INT64_MAX;
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, INT64_MAX - 1);
}
1859
1860
/* Disabled, presumably because ASMAtomicUoIncS64/UoDecS64 are not (yet)
   available - matches the commented-out DO_SIMPLE_TEST invocation in
   tstASMAtomicDecInc().  TODO confirm and enable or remove. */
#if 0
/**
 * Worker for the unordered (Uo) signed 64-bit increment/decrement tests;
 * mirrors tstASMAtomicDecIncS64Worker with the unordered variants.
 *
 * @param   pi64    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicUoDecIncS64Worker(int64_t volatile *pi64)
{
    *pi64 = 10;
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 9);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 8);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 7);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 6);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 5);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 4);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 3);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 0);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, -1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, -2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, -1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 0);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 3);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 3);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 3);
    *pi64 = INT64_MAX;
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, INT64_MAX - 1);
}
#endif
1890
1891
/**
 * Worker for the size_t atomic increment/decrement tests
 * (ASMAtomicIncZ/DecZ).  Operates around a large arch-independent base
 * value (all-ones shifted down 7 bits) so the test is meaningful for both
 * 32-bit and 64-bit size_t.
 *
 * @param   pcb     The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicDecIncZWorker(size_t volatile *pcb)
{
    size_t const uBaseVal = ~(size_t)0 >> 7;
    *pcb = uBaseVal;
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicDecZ, uBaseVal - 1);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicDecZ, uBaseVal - 2);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicDecZ, uBaseVal - 3);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicIncZ, uBaseVal - 2);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicIncZ, uBaseVal - 1);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicIncZ, uBaseVal);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicIncZ, uBaseVal + 1);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicDecZ, uBaseVal);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicDecZ, uBaseVal - 1);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicIncZ, uBaseVal);
}
1907
1908
/**
 * Drives the increment/decrement workers via the DO_SIMPLE_TEST machinery.
 * The commented-out lines correspond to the #if 0'ed Uo workers above.
 */
static void tstASMAtomicDecInc(void)
{
    DO_SIMPLE_TEST(ASMAtomicDecIncU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicUoDecIncU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicDecIncS32, int32_t);
    //DO_SIMPLE_TEST(ASMAtomicUoDecIncS32, int32_t);
    DO_SIMPLE_TEST(ASMAtomicDecIncU64, uint64_t);
    //DO_SIMPLE_TEST(ASMAtomicUoDecIncU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicDecIncS64, int64_t);
    //DO_SIMPLE_TEST(ASMAtomicUoDecIncS64, int64_t);
    DO_SIMPLE_TEST(ASMAtomicDecIncZ, size_t);
}
1921
1922
/** Tests a void-returning function: invokes a_Function(a_pVar, a_uVal) and
 * checks that *a_pVar now holds a_VarExpect, reporting a test failure
 * otherwise.  Note: a_Type is not used by the expansion; presumably kept
 * for signature parity with TEST_RET_NEW below. */
#define TEST_RET_VOID(a_Type, a_Fmt, a_pVar, a_Function, a_uVal, a_VarExpect) do { \
        a_Function(a_pVar, a_uVal); \
        if (RT_LIKELY( *(a_pVar) == (a_VarExpect) )) { } \
        else RTTestFailed(g_hTest, "%s, %d: FAILURE: %s(%s, " a_Fmt ") -> %s=" a_Fmt ", expected " a_Fmt "\n", \
                          __FUNCTION__, __LINE__, #a_Function, #a_pVar, a_uVal, #a_pVar, *(a_pVar), (a_VarExpect)); \
    } while (0)
1929
/** Tests a function returning the new value of the variable: invokes
 * a_Function(a_pVar, a_uVal) and checks that both the return value and
 * *a_pVar equal a_VarExpect, reporting a test failure otherwise. */
#define TEST_RET_NEW(a_Type, a_Fmt, a_pVar, a_Function, a_uVal, a_VarExpect) do { \
        a_Type uNewRet = a_Function(a_pVar, a_uVal); \
        if (RT_LIKELY( uNewRet == (a_VarExpect) && *(a_pVar) == (a_VarExpect) )) { } \
        else RTTestFailed(g_hTest, "%s, %d: FAILURE: %s(%s, " a_Fmt ") -> " a_Fmt " and %s=" a_Fmt ", expected both " a_Fmt "\n", \
                          __FUNCTION__, __LINE__, #a_Function, #a_pVar, a_uVal, uNewRet, #a_pVar, *(a_pVar), (a_VarExpect)); \
    } while (0)
1936
1937
/**
 * Worker for the 32-bit atomic bitwise tests
 * (ASMAtomicAndU32/OrU32/XorU32, void-returning variants).  The expected
 * values chain from line to line, so statement order is significant.
 *
 * @param   pu32    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicAndOrXorU32Worker(uint32_t volatile *pu32)
{
    *pu32 = UINT32_C(0xffffffff);
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicOrU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(0x8f8f8f8f), UINT32_C(0x8f8f8f8f));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicOrU32, UINT32_C(0x70707070), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(1), UINT32_C(1));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicOrU32, UINT32_C(0x80000000), UINT32_C(0x80000001));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(0x80000000), UINT32_C(0x80000000));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(0), UINT32_C(0));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicOrU32, UINT32_C(0x42424242), UINT32_C(0x42424242));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(0x00ff0f00), UINT32_C(0x00420200));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicXorU32, UINT32_C(0x42004042), UINT32_C(0x42424242));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicXorU32, UINT32_C(0xff024200), UINT32_C(0xbd400042));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicXorU32, UINT32_C(0x00000000), UINT32_C(0xbd400042));
}
1955
1956
/**
 * Worker for the unordered (Uo) 32-bit atomic bitwise tests
 * (ASMAtomicUoAndU32/UoOrU32/UoXorU32).  Mirrors
 * tstASMAtomicAndOrXorU32Worker with the unordered variants; expected
 * values chain from line to line.
 *
 * @param   pu32    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicUoAndOrXorU32Worker(uint32_t volatile *pu32)
{
    *pu32 = UINT32_C(0xffffffff);
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoOrU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(0x8f8f8f8f), UINT32_C(0x8f8f8f8f));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoOrU32, UINT32_C(0x70707070), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(1), UINT32_C(1));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoOrU32, UINT32_C(0x80000000), UINT32_C(0x80000001));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(0x80000000), UINT32_C(0x80000000));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(0), UINT32_C(0));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoOrU32, UINT32_C(0x42424242), UINT32_C(0x42424242));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(0x00ff0f00), UINT32_C(0x00420200));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoXorU32, UINT32_C(0x42004042), UINT32_C(0x42424242));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoXorU32, UINT32_C(0xff024200), UINT32_C(0xbd400042));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoXorU32, UINT32_C(0x00000000), UINT32_C(0xbd400042));
}
1974
1975
/**
 * Worker for exercising the 32-bit atomic AND/OR/XOR "Ex" operations, i.e.
 * the variants returning the old value.
 *
 * Each TEST_RET_OLD line applies the named operation with the first constant
 * as the operand and checks both the returned old value and that *pu32 ends
 * up equal to the second constant (see the TEST_RET_OLD macro defined
 * earlier in the file).
 *
 * @param   pu32    The test variable to operate on.
 */
DECLINLINE(void) tstASMAtomicAndOrXorExU32Worker(uint32_t volatile *pu32)
{
    *pu32 = UINT32_C(0xffffffff);
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicOrExU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(0x8f8f8f8f), UINT32_C(0x8f8f8f8f));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicOrExU32, UINT32_C(0x70707070), UINT32_C(0xffffffff));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(1), UINT32_C(1));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicOrExU32, UINT32_C(0x80000000), UINT32_C(0x80000001));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(0x80000000), UINT32_C(0x80000000));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(0), UINT32_C(0));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicOrExU32, UINT32_C(0x42424242), UINT32_C(0x42424242));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(0x00ff0f00), UINT32_C(0x00420200));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicXorExU32, UINT32_C(0x42004042), UINT32_C(0x42424242));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicXorExU32, UINT32_C(0xff024200), UINT32_C(0xbd400042));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicXorExU32, UINT32_C(0x00000000), UINT32_C(0xbd400042));
}
1993
1994
/**
 * Worker for exercising the 64-bit atomic AND/OR operations, first with
 * 32-bit-range values and then with full 64-bit patterns.
 *
 * Each TEST_RET_VOID line applies the named operation with the first constant
 * as the operand and then checks that *pu64 equals the second constant (see
 * the TEST_RET_VOID macro defined earlier in the file).
 *
 * @param   pu64    The test variable to operate on.
 *
 * @note    The XOR cases are commented out; presumably ASMAtomicXorU64 is
 *          not available here - TODO confirm against iprt/asm.h.
 */
DECLINLINE(void) tstASMAtomicAndOrXorU64Worker(uint64_t volatile *pu64)
{
    *pu64 = UINT64_C(0xffffffff);
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x8f8f8f8f), UINT64_C(0x8f8f8f8f));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x70707070), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(1), UINT64_C(1));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x80000000), UINT64_C(0x80000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x80000000), UINT64_C(0x80000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x42424242), UINT64_C(0x42424242));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x00ff0f00), UINT64_C(0x00420200));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0x42004042), UINT64_C(0x42424242));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0xff024200), UINT64_C(0xbd400042));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0x00000000), UINT64_C(0xbd400042));

    /* full 64-bit */
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x0000000000000000), UINT64_C(0x0000000000000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x8f8f8f8f8f8f8f8f), UINT64_C(0x8f8f8f8f8f8f8f8f));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x7070707070707070), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x0000000000000001), UINT64_C(0x0000000000000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x4242424242424242), UINT64_C(0x4242424242424242));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x00ff0f00ff0f0000), UINT64_C(0x0042020042020000));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0x4200404242040000), UINT64_C(0x4242424242420000));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0xff02420000ff2127), UINT64_C(0xbd40004242bd2127));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0x0000000000000000), UINT64_C(0xbd40004242bd2127));
}
2028
2029
/**
 * Worker for exercising the unordered (Uo) 64-bit atomic AND/OR operations,
 * first with 32-bit-range values and then with full 64-bit patterns.
 *
 * Each TEST_RET_VOID line applies the named operation with the first constant
 * as the operand and then checks that *pu64 equals the second constant (see
 * the TEST_RET_VOID macro defined earlier in the file).
 *
 * @param   pu64    The test variable to operate on.
 *
 * @note    The XOR cases are commented out; presumably ASMAtomicUoXorU64 is
 *          not available here - TODO confirm against iprt/asm.h.
 */
DECLINLINE(void) tstASMAtomicUoAndOrXorU64Worker(uint64_t volatile *pu64)
{
    *pu64 = UINT64_C(0xffffffff);
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x8f8f8f8f), UINT64_C(0x8f8f8f8f));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x70707070), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(1), UINT64_C(1));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x80000000), UINT64_C(0x80000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x80000000), UINT64_C(0x80000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x42424242), UINT64_C(0x42424242));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x00ff0f00), UINT64_C(0x00420200));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0x42004042), UINT64_C(0x42424242));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0xff024200), UINT64_C(0xbd400042));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0x00000000), UINT64_C(0xbd400042));

    /* full 64-bit */
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x0000000000000000), UINT64_C(0x0000000000000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x8f8f8f8f8f8f8f8f), UINT64_C(0x8f8f8f8f8f8f8f8f));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x7070707070707070), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x0000000000000001), UINT64_C(0x0000000000000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x4242424242424242), UINT64_C(0x4242424242424242));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x00ff0f00ff0f0000), UINT64_C(0x0042020042020000));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0x4200404242040000), UINT64_C(0x4242424242420000));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0xff02420000ff2127), UINT64_C(0xbd40004242bd2127));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0x0000000000000000), UINT64_C(0xbd40004242bd2127));
}
2063
2064
/* Disabled worker for the 64-bit AND/OR/XOR "Ex" (return-old-value) variants.
   Presumably the ASMAtomicOrExU64/AndExU64/XorExU64 APIs aren't available yet;
   the matching DO_SIMPLE_TEST invocation in tstASMAtomicAndOrXor is commented
   out as well - TODO confirm against iprt/asm.h before enabling. */
#if 0
DECLINLINE(void) tstASMAtomicAndOrXorExU64Worker(uint64_t volatile *pu64)
{
    *pu64 = UINT64_C(0xffffffff);
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x8f8f8f8f), UINT64_C(0x8f8f8f8f));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x70707070), UINT64_C(0xffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(1), UINT64_C(1));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x80000000), UINT64_C(0x80000001));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x80000000), UINT64_C(0x80000000));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x42424242), UINT64_C(0x42424242));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x00ff0f00), UINT64_C(0x00420200));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0x42004042), UINT64_C(0x42424242));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0xff024200), UINT64_C(0xbd400042));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0x00000000), UINT64_C(0xbd400042));

    /* full 64-bit */
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x0000000000000000), UINT64_C(0x0000000000000000));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x8f8f8f8f8f8f8f8f), UINT64_C(0x8f8f8f8f8f8f8f8f));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x7070707070707070), UINT64_C(0xffffffffffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x0000000000000001), UINT64_C(0x0000000000000001));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000001));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000000));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x4242424242424242), UINT64_C(0x4242424242424242));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x00ff0f00ff0f0000), UINT64_C(0x0042020042020000));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0x4200404242040000), UINT64_C(0x4242424242420000));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0xff02420000ff2127), UINT64_C(0xbd40004242bd2127));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0x0000000000000000), UINT64_C(0xbd40004242bd2127));
}
#endif
2100
2101
/**
 * Drives the atomic AND/OR/XOR worker functions above over the standard set of
 * test memory locations (via the DO_SIMPLE_TEST macro defined earlier).
 */
static void tstASMAtomicAndOrXor(void)
{
    DO_SIMPLE_TEST(ASMAtomicAndOrXorU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicUoAndOrXorU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicAndOrXorExU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicAndOrXorU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicUoAndOrXorU64, uint64_t);
    //DO_SIMPLE_TEST(ASMAtomicAndOrXorExU64, uint64_t); /* worker is disabled (#if 0) */
}
2111
2112
/** A page-sized byte buffer used by the ASMMemZeroPage tests. */
typedef struct
{
    uint8_t ab[PAGE_SIZE];   /**< The page content. */
} TSTPAGE;
2117
2118
2119DECLINLINE(void) tstASMMemZeroPageWorker(TSTPAGE *pPage)
2120{
2121 for (unsigned j = 0; j < 16; j++)
2122 {
2123 memset(pPage, 0x11 * j, sizeof(*pPage));
2124 ASMMemZeroPage(pPage);
2125 for (unsigned i = 0; i < sizeof(pPage->ab); i++)
2126 if (pPage->ab[i])
2127 RTTestFailed(g_hTest, "ASMMemZeroPage didn't clear byte at offset %#x!\n", i);
2128 }
2129}
2130
2131
/**
 * Tests ASMMemZeroPage by running tstASMMemZeroPageWorker over the standard
 * set of test memory locations (via the DO_SIMPLE_TEST macro defined earlier).
 */
static void tstASMMemZeroPage(void)
{
    DO_SIMPLE_TEST(ASMMemZeroPage, TSTPAGE);
}
2136
2137
/**
 * Tests ASMMemIsZeroPage against all-zero, all-0xff, and single-nonzero-byte
 * pages, using guarded allocations so over-reads at either end of the page
 * would fault.
 *
 * @param   hTest   The test handle to report results to.
 */
void tstASMMemIsZeroPage(RTTEST hTest)
{
    RTTestSub(hTest, "ASMMemIsZeroPage");

    /* Guard page before pvPage1 and after pvPage2 to catch out-of-bounds scans. */
    void *pvPage1 = RTTestGuardedAllocHead(hTest, PAGE_SIZE);
    void *pvPage2 = RTTestGuardedAllocTail(hTest, PAGE_SIZE);
    RTTESTI_CHECK_RETV(pvPage1 && pvPage2);

    /* An all-zero page must be reported as zero. */
    memset(pvPage1, 0, PAGE_SIZE);
    memset(pvPage2, 0, PAGE_SIZE);
    RTTESTI_CHECK(ASMMemIsZeroPage(pvPage1));
    RTTESTI_CHECK(ASMMemIsZeroPage(pvPage2));

    /* An all-0xff page must not. */
    memset(pvPage1, 0xff, PAGE_SIZE);
    memset(pvPage2, 0xff, PAGE_SIZE);
    RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage1));
    RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage2));

    /* A single non-zero byte anywhere in the page must be detected. */
    memset(pvPage1, 0, PAGE_SIZE);
    memset(pvPage2, 0, PAGE_SIZE);
    for (unsigned off = 0; off < PAGE_SIZE; off++)
    {
        ((uint8_t *)pvPage1)[off] = 1;
        RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage1));
        ((uint8_t *)pvPage1)[off] = 0;

        ((uint8_t *)pvPage2)[off] = 0x80;
        RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage2));
        ((uint8_t *)pvPage2)[off] = 0;
    }

    RTTestSubDone(hTest);
}
2171
2172
/**
 * Tests ASMMemFirstMismatchingU8 (and the related ASMMemIsZero /
 * ASMMemIsAllU8 helpers) on guarded pages, covering whole-page scans, short
 * sub-buffer scans near the guard pages, and a sweep over many alignments,
 * sizes and mismatch positions.
 *
 * @param   hTest   The test handle to report results to.
 */
void tstASMMemFirstMismatchingU8(RTTEST hTest)
{
    RTTestSub(hTest, "ASMMemFirstMismatchingU8");

    /* Guard page before pbPage1 and after pbPage2 to catch out-of-bounds scans. */
    uint8_t *pbPage1 = (uint8_t *)RTTestGuardedAllocHead(hTest, PAGE_SIZE);
    uint8_t *pbPage2 = (uint8_t *)RTTestGuardedAllocTail(hTest, PAGE_SIZE);
    RTTESTI_CHECK_RETV(pbPage1 && pbPage2);

    /* All-zero pages: no mismatch for 0, first byte mismatches anything else. */
    memset(pbPage1, 0, PAGE_SIZE);
    memset(pbPage2, 0, PAGE_SIZE);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 0) == NULL);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 0) == NULL);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 1) == pbPage1);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 1) == pbPage2);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 0x87) == pbPage1);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 0x87) == pbPage2);
    RTTESTI_CHECK(ASMMemIsZero(pbPage1, PAGE_SIZE));
    RTTESTI_CHECK(ASMMemIsZero(pbPage2, PAGE_SIZE));
    RTTESTI_CHECK(ASMMemIsAllU8(pbPage1, PAGE_SIZE, 0));
    RTTESTI_CHECK(ASMMemIsAllU8(pbPage2, PAGE_SIZE, 0));
    RTTESTI_CHECK(!ASMMemIsAllU8(pbPage1, PAGE_SIZE, 0x34));
    RTTESTI_CHECK(!ASMMemIsAllU8(pbPage2, PAGE_SIZE, 0x88));
    /* Short scans (0..31 bytes) right against the guard pages; a zero-length
       scan must return NULL regardless of the byte value. */
    unsigned cbSub = 32;
    while (cbSub-- > 0)
    {
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage1[PAGE_SIZE - cbSub], cbSub, 0) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage2[PAGE_SIZE - cbSub], cbSub, 0) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, cbSub, 0) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, cbSub, 0) == NULL);

        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage1[PAGE_SIZE - cbSub], cbSub, 0x34) == &pbPage1[PAGE_SIZE - cbSub] || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage2[PAGE_SIZE - cbSub], cbSub, 0x99) == &pbPage2[PAGE_SIZE - cbSub] || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, cbSub, 0x42) == pbPage1 || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, cbSub, 0x88) == pbPage2 || !cbSub);
    }

    /* Same exercise with all-0xff pages. */
    memset(pbPage1, 0xff, PAGE_SIZE);
    memset(pbPage2, 0xff, PAGE_SIZE);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 0xff) == NULL);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 0xff) == NULL);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 0xfe) == pbPage1);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 0xfe) == pbPage2);
    RTTESTI_CHECK(!ASMMemIsZero(pbPage1, PAGE_SIZE));
    RTTESTI_CHECK(!ASMMemIsZero(pbPage2, PAGE_SIZE));
    RTTESTI_CHECK(ASMMemIsAllU8(pbPage1, PAGE_SIZE, 0xff));
    RTTESTI_CHECK(ASMMemIsAllU8(pbPage2, PAGE_SIZE, 0xff));
    RTTESTI_CHECK(!ASMMemIsAllU8(pbPage1, PAGE_SIZE, 0));
    RTTESTI_CHECK(!ASMMemIsAllU8(pbPage2, PAGE_SIZE, 0));
    cbSub = 32;
    while (cbSub-- > 0)
    {
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage1[PAGE_SIZE - cbSub], cbSub, 0xff) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage2[PAGE_SIZE - cbSub], cbSub, 0xff) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, cbSub, 0xff) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, cbSub, 0xff) == NULL);

        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage1[PAGE_SIZE - cbSub], cbSub, 0xfe) == &pbPage1[PAGE_SIZE - cbSub] || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage2[PAGE_SIZE - cbSub], cbSub, 0xfe) == &pbPage2[PAGE_SIZE - cbSub] || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, cbSub, 0xfe) == pbPage1 || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, cbSub, 0xfe) == pbPage2 || !cbSub);
    }


    /*
     * Various alignments and sizes.
     */
    uint8_t const bFiller1 = 0x00;
    uint8_t const bFiller2 = 0xf6;
    size_t const cbBuf = 128;
    uint8_t *pbBuf1 = pbPage1;
    uint8_t *pbBuf2 = &pbPage2[PAGE_SIZE - cbBuf]; /* Put it up against the tail guard */
    memset(pbPage1, ~bFiller1, PAGE_SIZE);
    memset(pbPage2, ~bFiller2, PAGE_SIZE);
    memset(pbBuf1, bFiller1, cbBuf);
    memset(pbBuf2, bFiller2, cbBuf);
    /* Plant one mismatching byte at each position; the OR-ed/XOR-ed bits
       guarantee it differs from the corresponding filler. */
    for (size_t offNonZero = 0; offNonZero < cbBuf; offNonZero++)
    {
        uint8_t bRand = (uint8_t)RTRandU32();
        pbBuf1[offNonZero] = bRand | 1;
        pbBuf2[offNonZero] = (0x80 | bRand) ^ 0xf6;

        for (size_t offStart = 0; offStart < 32; offStart++)
        {
            size_t const cbMax = cbBuf - offStart;
            for (size_t cb = 0; cb < cbMax; cb++)
            {
                size_t const offEnd = offStart + cb;
                /* Temporarily clobber the byte just past the scan range to
                   detect scans that read one byte too many. */
                uint8_t bSaved1, bSaved2;
                if (offEnd < PAGE_SIZE)
                {
                    bSaved1 = pbBuf1[offEnd];
                    bSaved2 = pbBuf2[offEnd];
                    pbBuf1[offEnd] = 0xff;
                    pbBuf2[offEnd] = 0xff;
                }
#ifdef _MSC_VER /* simple stupid compiler warnings */
                else
                    bSaved1 = bSaved2 = 0;
#endif

                /* Note: offNonZero - offStart wraps (size_t is unsigned) when
                   offNonZero < offStart, correctly selecting the NULL branch. */
                uint8_t *pbRet = (uint8_t *)ASMMemFirstMismatchingU8(pbBuf1 + offStart, cb, bFiller1);
                RTTESTI_CHECK(offNonZero - offStart < cb ? pbRet == &pbBuf1[offNonZero] : pbRet == NULL);

                pbRet = (uint8_t *)ASMMemFirstMismatchingU8(pbBuf2 + offStart, cb, bFiller2);
                RTTESTI_CHECK(offNonZero - offStart < cb ? pbRet == &pbBuf2[offNonZero] : pbRet == NULL);

                if (offEnd < PAGE_SIZE)
                {
                    pbBuf1[offEnd] = bSaved1;
                    pbBuf2[offEnd] = bSaved2;
                }
            }
        }

        /* Restore the filler byte before moving the mismatch position on. */
        pbBuf1[offNonZero] = 0;
        pbBuf2[offNonZero] = 0xf6;
    }

    RTTestSubDone(hTest);
}
2293
2294
2295void tstASMMemZero32(void)
2296{
2297 RTTestSub(g_hTest, "ASMMemFill32");
2298
2299 struct
2300 {
2301 uint64_t u64Magic1;
2302 uint8_t abPage[PAGE_SIZE - 32];
2303 uint64_t u64Magic2;
2304 } Buf1, Buf2, Buf3;
2305
2306 Buf1.u64Magic1 = UINT64_C(0xffffffffffffffff);
2307 memset(Buf1.abPage, 0x55, sizeof(Buf1.abPage));
2308 Buf1.u64Magic2 = UINT64_C(0xffffffffffffffff);
2309 Buf2.u64Magic1 = UINT64_C(0xffffffffffffffff);
2310 memset(Buf2.abPage, 0x77, sizeof(Buf2.abPage));
2311 Buf2.u64Magic2 = UINT64_C(0xffffffffffffffff);
2312 Buf3.u64Magic1 = UINT64_C(0xffffffffffffffff);
2313 memset(Buf3.abPage, 0x99, sizeof(Buf3.abPage));
2314 Buf3.u64Magic2 = UINT64_C(0xffffffffffffffff);
2315 ASMMemZero32(Buf1.abPage, sizeof(Buf1.abPage));
2316 ASMMemZero32(Buf2.abPage, sizeof(Buf2.abPage));
2317 ASMMemZero32(Buf3.abPage, sizeof(Buf3.abPage));
2318 if ( Buf1.u64Magic1 != UINT64_C(0xffffffffffffffff)
2319 || Buf1.u64Magic2 != UINT64_C(0xffffffffffffffff)
2320 || Buf2.u64Magic1 != UINT64_C(0xffffffffffffffff)
2321 || Buf2.u64Magic2 != UINT64_C(0xffffffffffffffff)
2322 || Buf3.u64Magic1 != UINT64_C(0xffffffffffffffff)
2323 || Buf3.u64Magic2 != UINT64_C(0xffffffffffffffff))
2324 {
2325 RTTestFailed(g_hTest, "ASMMemZero32 violated one/both magic(s)!\n");
2326 }
2327 for (unsigned i = 0; i < RT_ELEMENTS(Buf1.abPage); i++)
2328 if (Buf1.abPage[i])
2329 RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i);
2330 for (unsigned i = 0; i < RT_ELEMENTS(Buf2.abPage); i++)
2331 if (Buf2.abPage[i])
2332 RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i);
2333 for (unsigned i = 0; i < RT_ELEMENTS(Buf3.abPage); i++)
2334 if (Buf3.abPage[i])
2335 RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i);
2336}
2337
2338
2339void tstASMMemFill32(void)
2340{
2341 RTTestSub(g_hTest, "ASMMemFill32");
2342
2343 struct
2344 {
2345 uint64_t u64Magic1;
2346 uint32_t au32Page[PAGE_SIZE / 4];
2347 uint64_t u64Magic2;
2348 } Buf1;
2349 struct
2350 {
2351 uint64_t u64Magic1;
2352 uint32_t au32Page[(PAGE_SIZE / 4) - 3];
2353 uint64_t u64Magic2;
2354 } Buf2;
2355 struct
2356 {
2357 uint64_t u64Magic1;
2358 uint32_t au32Page[(PAGE_SIZE / 4) - 1];
2359 uint64_t u64Magic2;
2360 } Buf3;
2361
2362 Buf1.u64Magic1 = UINT64_C(0xffffffffffffffff);
2363 memset(Buf1.au32Page, 0x55, sizeof(Buf1.au32Page));
2364 Buf1.u64Magic2 = UINT64_C(0xffffffffffffffff);
2365 Buf2.u64Magic1 = UINT64_C(0xffffffffffffffff);
2366 memset(Buf2.au32Page, 0x77, sizeof(Buf2.au32Page));
2367 Buf2.u64Magic2 = UINT64_C(0xffffffffffffffff);
2368 Buf3.u64Magic1 = UINT64_C(0xffffffffffffffff);
2369 memset(Buf3.au32Page, 0x99, sizeof(Buf3.au32Page));
2370 Buf3.u64Magic2 = UINT64_C(0xffffffffffffffff);
2371 ASMMemFill32(Buf1.au32Page, sizeof(Buf1.au32Page), 0xdeadbeef);
2372 ASMMemFill32(Buf2.au32Page, sizeof(Buf2.au32Page), 0xcafeff01);
2373 ASMMemFill32(Buf3.au32Page, sizeof(Buf3.au32Page), 0xf00dd00f);
2374 if ( Buf1.u64Magic1 != UINT64_C(0xffffffffffffffff)
2375 || Buf1.u64Magic2 != UINT64_C(0xffffffffffffffff)
2376 || Buf2.u64Magic1 != UINT64_C(0xffffffffffffffff)
2377 || Buf2.u64Magic2 != UINT64_C(0xffffffffffffffff)
2378 || Buf3.u64Magic1 != UINT64_C(0xffffffffffffffff)
2379 || Buf3.u64Magic2 != UINT64_C(0xffffffffffffffff))
2380 RTTestFailed(g_hTest, "ASMMemFill32 violated one/both magic(s)!\n");
2381 for (unsigned i = 0; i < RT_ELEMENTS(Buf1.au32Page); i++)
2382 if (Buf1.au32Page[i] != 0xdeadbeef)
2383 RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf1.au32Page[i], 0xdeadbeef);
2384 for (unsigned i = 0; i < RT_ELEMENTS(Buf2.au32Page); i++)
2385 if (Buf2.au32Page[i] != 0xcafeff01)
2386 RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf2.au32Page[i], 0xcafeff01);
2387 for (unsigned i = 0; i < RT_ELEMENTS(Buf3.au32Page); i++)
2388 if (Buf3.au32Page[i] != 0xf00dd00f)
2389 RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf3.au32Page[i], 0xf00dd00f);
2390}
2391
2392
2393void tstASMMisc(void)
2394{
2395 RTTestSub(g_hTest, "Misc");
2396 for (uint32_t i = 0; i < 20; i++)
2397 {
2398 ASMWriteFence();
2399 ASMCompilerBarrier();
2400 ASMReadFence();
2401 ASMNopPause();
2402 ASMSerializeInstruction();
2403 ASMMemoryFence();
2404 }
2405}
2406
2407void tstASMMath(void)
2408{
2409 RTTestSub(g_hTest, "Math");
2410
2411 uint64_t u64 = ASMMult2xU32RetU64(UINT32_C(0x80000000), UINT32_C(0x10000000));
2412 CHECKVAL(u64, UINT64_C(0x0800000000000000), "%#018RX64");
2413
2414 uint32_t u32 = ASMDivU64ByU32RetU32(UINT64_C(0x0800000000000000), UINT32_C(0x10000000));
2415 CHECKVAL(u32, UINT32_C(0x80000000), "%#010RX32");
2416
2417 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x00000001), UINT32_C(0x00000001), UINT32_C(0x00000001));
2418 CHECKVAL(u32, UINT32_C(0x00000001), "%#018RX32");
2419 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x10000000), UINT32_C(0x80000000), UINT32_C(0x20000000));
2420 CHECKVAL(u32, UINT32_C(0x40000000), "%#018RX32");
2421 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x76543210), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
2422 CHECKVAL(u32, UINT32_C(0x76543210), "%#018RX32");
2423 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0xffffffff), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
2424 CHECKVAL(u32, UINT32_C(0xffffffff), "%#018RX32");
2425 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0xffffffff), UINT32_C(0xfffffff0), UINT32_C(0xffffffff));
2426 CHECKVAL(u32, UINT32_C(0xfffffff0), "%#018RX32");
2427 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x10359583), UINT32_C(0x58734981), UINT32_C(0xf8694045));
2428 CHECKVAL(u32, UINT32_C(0x05c584ce), "%#018RX32");
2429 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x10359583), UINT32_C(0xf8694045), UINT32_C(0x58734981));
2430 CHECKVAL(u32, UINT32_C(0x2d860795), "%#018RX32");
2431
2432#if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
2433 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x0000000000000001), UINT32_C(0x00000001), UINT32_C(0x00000001));
2434 CHECKVAL(u64, UINT64_C(0x0000000000000001), "%#018RX64");
2435 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x0000000100000000), UINT32_C(0x80000000), UINT32_C(0x00000002));
2436 CHECKVAL(u64, UINT64_C(0x4000000000000000), "%#018RX64");
2437 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xfedcba9876543210), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
2438 CHECKVAL(u64, UINT64_C(0xfedcba9876543210), "%#018RX64");
2439 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xffffffffffffffff), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
2440 CHECKVAL(u64, UINT64_C(0xffffffffffffffff), "%#018RX64");
2441 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xffffffffffffffff), UINT32_C(0xfffffff0), UINT32_C(0xffffffff));
2442 CHECKVAL(u64, UINT64_C(0xfffffff0fffffff0), "%#018RX64");
2443 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x3415934810359583), UINT32_C(0x58734981), UINT32_C(0xf8694045));
2444 CHECKVAL(u64, UINT64_C(0x128b9c3d43184763), "%#018RX64");
2445 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x3415934810359583), UINT32_C(0xf8694045), UINT32_C(0x58734981));
2446 CHECKVAL(u64, UINT64_C(0x924719355cd35a27), "%#018RX64");
2447
2448# if 0 /* bird: question is whether this should trap or not:
2449 *
2450 * frank: Of course it must trap:
2451 *
2452 * 0xfffffff8 * 0x77d7daf8 = 0x77d7daf441412840
2453 *
2454 * During the following division, the quotient must fit into a 32-bit register.
2455 * Therefore the smallest valid divisor is
2456 *
2457 * (0x77d7daf441412840 >> 32) + 1 = 0x77d7daf5
2458 *
2459 * which is definitely greater than 0x3b9aca00.
2460 *
2461 * bird: No, the C version does *not* crash. So, the question is whether there's any
2462 * code depending on it not crashing.
2463 *
2464 * Of course the assembly versions of the code crash right now for the reasons you've
2465 * given, but the 32-bit MSC version does not crash.
2466 *
2467 * frank: The C version does not crash but delivers incorrect results for this case.
2468 * The reason is
2469 *
2470 * u.s.Hi = (unsigned long)(u64Hi / u32C);
2471 *
2472 * Here the division is actually 64-bit by 64-bit but the 64-bit result is truncated
2473 * to 32 bit. If using this (optimized and fast) function we should just be sure that
2474 * the operands are in a valid range.
2475 */
2476 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xfffffff8c65d6731), UINT32_C(0x77d7daf8), UINT32_C(0x3b9aca00));
2477 CHECKVAL(u64, UINT64_C(0x02b8f9a2aa74e3dc), "%#018RX64");
2478# endif
2479#endif /* AMD64 || X86 */
2480
2481 u32 = ASMModU64ByU32RetU32(UINT64_C(0x0ffffff8c65d6731), UINT32_C(0x77d7daf8));
2482 CHECKVAL(u32, UINT32_C(0x3B642451), "%#010RX32");
2483
2484 int32_t i32;
2485 i32 = ASMModS64ByS32RetS32(INT64_C(-11), INT32_C(-2));
2486 CHECKVAL(i32, INT32_C(-1), "%010RI32");
2487 i32 = ASMModS64ByS32RetS32(INT64_C(-11), INT32_C(2));
2488 CHECKVAL(i32, INT32_C(-1), "%010RI32");
2489 i32 = ASMModS64ByS32RetS32(INT64_C(11), INT32_C(-2));
2490 CHECKVAL(i32, INT32_C(1), "%010RI32");
2491
2492 i32 = ASMModS64ByS32RetS32(INT64_C(92233720368547758), INT32_C(2147483647));
2493 CHECKVAL(i32, INT32_C(2104533974), "%010RI32");
2494 i32 = ASMModS64ByS32RetS32(INT64_C(-92233720368547758), INT32_C(2147483647));
2495 CHECKVAL(i32, INT32_C(-2104533974), "%010RI32");
2496}
2497
2498
/**
 * Tests ASMByteSwapU64/U32/U16: for each width it checks a couple of patterns
 * (input unchanged, expected swapped output, double-swap restores the input)
 * plus the 0 and all-ones fixed points.
 */
void tstASMByteSwap(void)
{
    RTTestSub(g_hTest, "ASMByteSwap*");

    /* 64-bit. */
    uint64_t u64In = UINT64_C(0x0011223344556677);
    uint64_t u64Out = ASMByteSwapU64(u64In);
    CHECKVAL(u64In, UINT64_C(0x0011223344556677), "%#018RX64");
    CHECKVAL(u64Out, UINT64_C(0x7766554433221100), "%#018RX64");
    u64Out = ASMByteSwapU64(u64Out);
    CHECKVAL(u64Out, u64In, "%#018RX64");
    u64In  = UINT64_C(0x0123456789abcdef);
    u64Out = ASMByteSwapU64(u64In);
    CHECKVAL(u64In, UINT64_C(0x0123456789abcdef), "%#018RX64");
    CHECKVAL(u64Out, UINT64_C(0xefcdab8967452301), "%#018RX64");
    u64Out = ASMByteSwapU64(u64Out);
    CHECKVAL(u64Out, u64In, "%#018RX64");
    u64In  = 0;
    u64Out = ASMByteSwapU64(u64In);
    CHECKVAL(u64Out, u64In, "%#018RX64");
    u64In  = UINT64_MAX;
    u64Out = ASMByteSwapU64(u64In);
    CHECKVAL(u64Out, u64In, "%#018RX64");

    /* 32-bit. */
    uint32_t u32In = UINT32_C(0x00112233);
    uint32_t u32Out = ASMByteSwapU32(u32In);
    CHECKVAL(u32In, UINT32_C(0x00112233), "%#010RX32");
    CHECKVAL(u32Out, UINT32_C(0x33221100), "%#010RX32");
    u32Out = ASMByteSwapU32(u32Out);
    CHECKVAL(u32Out, u32In, "%#010RX32");
    u32In  = UINT32_C(0x12345678);
    u32Out = ASMByteSwapU32(u32In);
    CHECKVAL(u32In, UINT32_C(0x12345678), "%#010RX32");
    CHECKVAL(u32Out, UINT32_C(0x78563412), "%#010RX32");
    u32Out = ASMByteSwapU32(u32Out);
    CHECKVAL(u32Out, u32In, "%#010RX32");
    u32In  = 0;
    u32Out = ASMByteSwapU32(u32In);
    CHECKVAL(u32Out, u32In, "%#010RX32");
    u32In  = UINT32_MAX;
    u32Out = ASMByteSwapU32(u32In);
    CHECKVAL(u32Out, u32In, "%#010RX32");

    /* 16-bit. */
    uint16_t u16In = UINT16_C(0x0011);
    uint16_t u16Out = ASMByteSwapU16(u16In);
    CHECKVAL(u16In, UINT16_C(0x0011), "%#06RX16");
    CHECKVAL(u16Out, UINT16_C(0x1100), "%#06RX16");
    u16Out = ASMByteSwapU16(u16Out);
    CHECKVAL(u16Out, u16In, "%#06RX16");
    u16In  = UINT16_C(0x1234);
    u16Out = ASMByteSwapU16(u16In);
    CHECKVAL(u16In, UINT16_C(0x1234), "%#06RX16");
    CHECKVAL(u16Out, UINT16_C(0x3412), "%#06RX16");
    u16Out = ASMByteSwapU16(u16Out);
    CHECKVAL(u16Out, u16In, "%#06RX16");
    u16In  = 0;
    u16Out = ASMByteSwapU16(u16In);
    CHECKVAL(u16Out, u16In, "%#06RX16");
    u16In  = UINT16_MAX;
    u16Out = ASMByteSwapU16(u16In);
    CHECKVAL(u16Out, u16In, "%#06RX16");
}
2560
2561
2562void tstASMBench(void)
2563{
2564 /*
2565 * Make this static. We don't want to have this located on the stack.
2566 */
2567 static uint8_t volatile s_u8;
2568 static int8_t volatile s_i8;
2569 static uint16_t volatile s_u16;
2570 static int16_t volatile s_i16;
2571 static uint32_t volatile s_u32;
2572 static int32_t volatile s_i32;
2573 static uint64_t volatile s_u64;
2574 static int64_t volatile s_i64;
2575 unsigned i;
2576 const unsigned cRounds = _2M; /* Must be multiple of 8 */
2577 uint64_t u64Elapsed;
2578
2579 RTTestSub(g_hTest, "Benchmarking");
2580
2581#if 0 && !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
2582# define BENCH(op, str) \
2583 do { \
2584 RTThreadYield(); \
2585 u64Elapsed = ASMReadTSC(); \
2586 for (i = cRounds; i > 0; i--) \
2587 op; \
2588 u64Elapsed = ASMReadTSC() - u64Elapsed; \
2589 RTTestValue(g_hTest, str, u64Elapsed / cRounds, RTTESTUNIT_TICKS_PER_CALL); \
2590 } while (0)
2591#else
2592# define BENCH(op, str) \
2593 do { \
2594 RTThreadYield(); \
2595 u64Elapsed = RTTimeNanoTS(); \
2596 for (i = cRounds / 8; i > 0; i--) \
2597 { \
2598 op; \
2599 op; \
2600 op; \
2601 op; \
2602 op; \
2603 op; \
2604 op; \
2605 op; \
2606 } \
2607 u64Elapsed = RTTimeNanoTS() - u64Elapsed; \
2608 RTTestValue(g_hTest, str, u64Elapsed / cRounds, RTTESTUNIT_NS_PER_CALL); \
2609 } while (0)
2610#endif
2611#if (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)) && !defined(GCC44_32BIT_PIC)
2612# define BENCH_TSC(op, str) \
2613 do { \
2614 RTThreadYield(); \
2615 u64Elapsed = ASMReadTSC(); \
2616 for (i = cRounds / 8; i > 0; i--) \
2617 { \
2618 op; \
2619 op; \
2620 op; \
2621 op; \
2622 op; \
2623 op; \
2624 op; \
2625 op; \
2626 } \
2627 u64Elapsed = ASMReadTSC() - u64Elapsed; \
2628 RTTestValue(g_hTest, str, u64Elapsed / cRounds, /*RTTESTUNIT_TICKS_PER_CALL*/ RTTESTUNIT_NONE); \
2629 } while (0)
2630#else
2631# define BENCH_TSC(op, str) BENCH(op, str)
2632#endif
2633
2634 BENCH(s_u32 = 0, "s_u32 = 0");
2635 BENCH(ASMAtomicUoReadU8(&s_u8), "ASMAtomicUoReadU8");
2636 BENCH(ASMAtomicUoReadS8(&s_i8), "ASMAtomicUoReadS8");
2637 BENCH(ASMAtomicUoReadU16(&s_u16), "ASMAtomicUoReadU16");
2638 BENCH(ASMAtomicUoReadS16(&s_i16), "ASMAtomicUoReadS16");
2639 BENCH(ASMAtomicUoReadU32(&s_u32), "ASMAtomicUoReadU32");
2640 BENCH(ASMAtomicUoReadS32(&s_i32), "ASMAtomicUoReadS32");
2641 BENCH(ASMAtomicUoReadU64(&s_u64), "ASMAtomicUoReadU64");
2642 BENCH(ASMAtomicUoReadS64(&s_i64), "ASMAtomicUoReadS64");
2643 BENCH(ASMAtomicReadU8(&s_u8), "ASMAtomicReadU8");
2644 BENCH(ASMAtomicReadS8(&s_i8), "ASMAtomicReadS8");
2645 BENCH(ASMAtomicReadU16(&s_u16), "ASMAtomicReadU16");
2646 BENCH(ASMAtomicReadS16(&s_i16), "ASMAtomicReadS16");
2647 BENCH(ASMAtomicReadU32(&s_u32), "ASMAtomicReadU32");
2648 BENCH(ASMAtomicReadS32(&s_i32), "ASMAtomicReadS32");
2649 BENCH(ASMAtomicReadU64(&s_u64), "ASMAtomicReadU64");
2650 BENCH(ASMAtomicReadS64(&s_i64), "ASMAtomicReadS64");
2651 BENCH(ASMAtomicUoWriteU8(&s_u8, 0), "ASMAtomicUoWriteU8");
2652 BENCH(ASMAtomicUoWriteS8(&s_i8, 0), "ASMAtomicUoWriteS8");
2653 BENCH(ASMAtomicUoWriteU16(&s_u16, 0), "ASMAtomicUoWriteU16");
2654 BENCH(ASMAtomicUoWriteS16(&s_i16, 0), "ASMAtomicUoWriteS16");
2655 BENCH(ASMAtomicUoWriteU32(&s_u32, 0), "ASMAtomicUoWriteU32");
2656 BENCH(ASMAtomicUoWriteS32(&s_i32, 0), "ASMAtomicUoWriteS32");
2657 BENCH(ASMAtomicUoWriteU64(&s_u64, 0), "ASMAtomicUoWriteU64");
2658 BENCH(ASMAtomicUoWriteS64(&s_i64, 0), "ASMAtomicUoWriteS64");
2659 BENCH(ASMAtomicWriteU8(&s_u8, 0), "ASMAtomicWriteU8");
2660 BENCH(ASMAtomicWriteS8(&s_i8, 0), "ASMAtomicWriteS8");
2661 BENCH(ASMAtomicWriteU16(&s_u16, 0), "ASMAtomicWriteU16");
2662 BENCH(ASMAtomicWriteS16(&s_i16, 0), "ASMAtomicWriteS16");
2663 BENCH(ASMAtomicWriteU32(&s_u32, 0), "ASMAtomicWriteU32");
2664 BENCH(ASMAtomicWriteS32(&s_i32, 0), "ASMAtomicWriteS32");
2665 BENCH(ASMAtomicWriteU64(&s_u64, 0), "ASMAtomicWriteU64");
2666 BENCH(ASMAtomicWriteS64(&s_i64, 0), "ASMAtomicWriteS64");
2667 BENCH(ASMAtomicXchgU8(&s_u8, 0), "ASMAtomicXchgU8");
2668 BENCH(ASMAtomicXchgS8(&s_i8, 0), "ASMAtomicXchgS8");
2669 BENCH(ASMAtomicXchgU16(&s_u16, 0), "ASMAtomicXchgU16");
2670 BENCH(ASMAtomicXchgS16(&s_i16, 0), "ASMAtomicXchgS16");
2671 BENCH(ASMAtomicXchgU32(&s_u32, 0), "ASMAtomicXchgU32");
2672 BENCH(ASMAtomicXchgS32(&s_i32, 0), "ASMAtomicXchgS32");
2673 BENCH(ASMAtomicXchgU64(&s_u64, 0), "ASMAtomicXchgU64");
2674 BENCH(ASMAtomicXchgS64(&s_i64, 0), "ASMAtomicXchgS64");
2675 BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 0), "ASMAtomicCmpXchgU32");
2676 BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 0), "ASMAtomicCmpXchgS32");
2677 BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 0), "ASMAtomicCmpXchgU64");
2678 BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 0), "ASMAtomicCmpXchgS64");
2679 BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 1), "ASMAtomicCmpXchgU32/neg");
2680 BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 1), "ASMAtomicCmpXchgS32/neg");
2681 BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 1), "ASMAtomicCmpXchgU64/neg");
2682 BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 1), "ASMAtomicCmpXchgS64/neg");
2683 BENCH(ASMAtomicIncU32(&s_u32), "ASMAtomicIncU32");
2684 BENCH(ASMAtomicIncS32(&s_i32), "ASMAtomicIncS32");
2685 BENCH(ASMAtomicDecU32(&s_u32), "ASMAtomicDecU32");
2686 BENCH(ASMAtomicDecS32(&s_i32), "ASMAtomicDecS32");
2687 BENCH(ASMAtomicAddU32(&s_u32, 5), "ASMAtomicAddU32");
2688 BENCH(ASMAtomicAddS32(&s_i32, 5), "ASMAtomicAddS32");
2689 BENCH(ASMAtomicUoIncU32(&s_u32), "ASMAtomicUoIncU32");
2690 BENCH(ASMAtomicUoDecU32(&s_u32), "ASMAtomicUoDecU32");
2691 BENCH(ASMAtomicUoAndU32(&s_u32, 0xffffffff), "ASMAtomicUoAndU32");
2692 BENCH(ASMAtomicUoOrU32(&s_u32, 0xffffffff), "ASMAtomicUoOrU32");
2693#if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
2694 BENCH_TSC(ASMSerializeInstructionCpuId(), "ASMSerializeInstructionCpuId");
2695 BENCH_TSC(ASMSerializeInstructionIRet(), "ASMSerializeInstructionIRet");
2696#endif
2697 BENCH(ASMReadFence(), "ASMReadFence");
2698 BENCH(ASMWriteFence(), "ASMWriteFence");
2699 BENCH(ASMMemoryFence(), "ASMMemoryFence");
2700 BENCH(ASMSerializeInstruction(), "ASMSerializeInstruction");
2701 BENCH(ASMNopPause(), "ASMNopPause");
2702
2703 /* The Darwin gcc does not like this ... */
2704#if !defined(RT_OS_DARWIN) && !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
2705 BENCH(s_u8 = ASMGetApicId(), "ASMGetApicId");
2706 BENCH(s_u32 = ASMGetApicIdExt0B(), "ASMGetApicIdExt0B");
2707 BENCH(s_u32 = ASMGetApicIdExt8000001E(), "ASMGetApicIdExt8000001E");
2708#endif
2709#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
2710 uint32_t uAux;
2711 if ( ASMHasCpuId()
2712 && ASMIsValidExtRange(ASMCpuId_EAX(0x80000000))
2713 && (ASMCpuId_EDX(0x80000001) & X86_CPUID_EXT_FEATURE_EDX_RDTSCP) )
2714 {
2715 BENCH_TSC(ASMSerializeInstructionRdTscp(), "ASMSerializeInstructionRdTscp");
2716 BENCH(s_u64 = ASMReadTscWithAux(&uAux), "ASMReadTscWithAux");
2717 }
2718 BENCH(s_u64 = ASMReadTSC(), "ASMReadTSC");
2719 union
2720 {
2721 uint64_t u64[2];
2722 RTIDTR Unaligned;
2723 struct
2724 {
2725 uint16_t abPadding[3];
2726 RTIDTR Aligned;
2727 } s;
2728 } uBuf;
2729 Assert(((uintptr_t)&uBuf.Unaligned.pIdt & (sizeof(uintptr_t) - 1)) != 0);
2730 BENCH(ASMGetIDTR(&uBuf.Unaligned), "ASMGetIDTR/unaligned");
2731 Assert(((uintptr_t)&uBuf.s.Aligned.pIdt & (sizeof(uintptr_t) - 1)) == 0);
2732 BENCH(ASMGetIDTR(&uBuf.s.Aligned), "ASMGetIDTR/aligned");
2733#endif
2734
2735#undef BENCH
2736}
2737
2738
/**
 * Testcase entry point: creates the RTTest instance, runs all the inline
 * assembly sub-testcases plus the benchmark, and returns the test summary
 * status.
 *
 * @returns Test status code suitable as process exit code (0 on success).
 */
int main(int argc, char **argv)
{
    RT_NOREF_PV(argc); RT_NOREF_PV(argv);

    int rc = RTTestInitAndCreate("tstRTInlineAsm", &g_hTest);
    if (rc)
        return rc; /* Test framework could not be set up; bail out with its status. */
    RTTestBanner(g_hTest);

    /*
     * Execute the tests.
     */
#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
    /* CPUID tests are x86/AMD64 only; GCC44_32BIT_PIC guards against the
       gcc 4.4 32-bit PIC bug mentioned at the top of the file. */
    tstASMCpuId();
    //bruteForceCpuId();
#endif
#if 1 /* Convenience switch for quickly disabling everything but the CPUID tests. */
    tstASMAtomicRead();
    tstASMAtomicWrite();
    tstASMAtomicXchg();
    tstASMAtomicCmpXchg();
    tstASMAtomicCmpXchgEx();

    tstASMAtomicAdd();
    tstASMAtomicDecInc();
    tstASMAtomicAndOrXor();

    tstASMMemZeroPage();
    tstASMMemIsZeroPage(g_hTest);
    tstASMMemFirstMismatchingU8(g_hTest);
    tstASMMemZero32();
    tstASMMemFill32();

    tstASMMisc();

    tstASMMath();

    tstASMByteSwap();

    tstASMBench();
#endif

    /*
     * Show the result.
     */
    return RTTestSummaryAndDestroy(g_hTest);
}
2786