VirtualBox

source: vbox/trunk/src/VBox/Runtime/testcase/tstRTInlineAsm.cpp@ 87228

Last change on this file since 87228 was 87228, checked in by vboxsync, 4 years ago

tstRTInlineAsm: Extending testcase. bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id Revision
File size: 139.7 KB
 
1/* $Id: tstRTInlineAsm.cpp 87228 2021-01-12 13:56:20Z vboxsync $ */
2/** @file
3 * IPRT Testcase - inline assembly.
4 */
5
6/*
7 * Copyright (C) 2006-2020 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.alldomusa.eu.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 *
17 * The contents of this file may alternatively be used under the terms
18 * of the Common Development and Distribution License Version 1.0
19 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
20 * VirtualBox OSE distribution, in which case the provisions of the
21 * CDDL are applicable instead of those of the GPL.
22 *
23 * You may elect to license modified versions of this file under the
24 * terms and conditions of either the GPL or the CDDL or both.
25 */
26
27
28/*********************************************************************************************************************************
29* Header Files *
30*********************************************************************************************************************************/
31#include <iprt/asm.h>
32#include <iprt/asm-math.h>
33
34/* See http://gcc.gnu.org/bugzilla/show_bug.cgi?id=44018. Only gcc version 4.4
35 * is affected. No harm for the VBox code: If the cpuid code compiles, it works
36 * fine. */
37#if defined(__GNUC__) && defined(RT_ARCH_X86) && defined(__PIC__)
38# if __GNUC__ == 4 && __GNUC_MINOR__ == 4
39# define GCC44_32BIT_PIC
40# endif
41#endif
42
43#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
44# include <iprt/asm-amd64-x86.h>
45# include <iprt/x86.h>
46#else
47# include <iprt/time.h>
48#endif
49#include <iprt/mem.h>
50#include <iprt/param.h>
51#include <iprt/rand.h>
52#include <iprt/stream.h>
53#include <iprt/string.h>
54#include <iprt/thread.h>
55#include <iprt/test.h>
56#include <iprt/time.h>
57
58
59
60/*********************************************************************************************************************************
61* Defined Constants And Macros *
62*********************************************************************************************************************************/
/** Compares @a val against @a expect and logs a test failure (both values
 * formatted with @a fmt) when they differ.  @a val and @a expect are
 * evaluated again in the failure path, so side-effecting arguments should
 * be avoided. */
#define CHECKVAL(val, expect, fmt) \
    do \
    { \
        if ((val) != (expect)) \
        { \
            RTTestFailed(g_hTest, "%s, %d: " #val ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (expect), (val)); \
        } \
    } while (0)

/** Evaluates the expression @a op exactly once into a local of type @a type
 * and logs a test failure when the result differs from @a expect (cast to
 * @a type, formatted with @a fmt). */
#define CHECKOP(op, expect, fmt, type) \
    do \
    { \
        type val = op; \
        if (val != (type)(expect)) \
        { \
            RTTestFailed(g_hTest, "%s, %d: " #op ": expected " fmt " got " fmt "\n", __FUNCTION__, __LINE__, (type)(expect), val); \
        } \
    } while (0)

/** Checks both the return value of @a a_Operation (against @a a_ExpectRetVal)
 * and the value @a a_pVar points at afterwards (against @a a_ExpectVarVal),
 * using one shared format string. */
#define CHECK_OP_AND_VAL(a_Type, a_Fmt, a_pVar, a_Operation, a_ExpectRetVal, a_ExpectVarVal) \
    do { \
        CHECKOP(a_Operation, a_ExpectRetVal, a_Fmt, a_Type); \
        CHECKVAL(*a_pVar, a_ExpectVarVal, a_Fmt); \
    } while (0)

/** Same as CHECK_OP_AND_VAL, except the return value and the variable get
 * separate format strings (@a a_FmtRet / @a a_FmtVar). */
#define CHECK_OP_AND_VAL_EX(a_TypeRet, a_FmtRet, a_FmtVar, a_pVar, a_Operation, a_ExpectRetVal, a_ExpectVarVal) \
    do { \
        CHECKOP(a_Operation, a_ExpectRetVal, a_FmtRet, a_TypeRet); \
        CHECKVAL(*a_pVar, a_ExpectVarVal, a_FmtVar); \
    } while (0)

/** Same as CHECK_OP_AND_VAL_EX, but additionally checks a second variable
 * @a a_uVar2 against @a a_ExpectVarVal2 (formatted with @a a_FmtVar). */
#define CHECK_OP_AND_VAL_EX2(a_TypeRet, a_FmtRet, a_FmtVar, a_pVar, a_uVar2, a_Operation, a_ExpectRetVal, a_ExpectVarVal, a_ExpectVarVal2) \
    do { \
        CHECKOP(a_Operation, a_ExpectRetVal, a_FmtRet, a_TypeRet); \
        CHECKVAL(*a_pVar, a_ExpectVarVal, a_FmtVar); \
        CHECKVAL(a_uVar2, a_ExpectVarVal2, a_FmtVar); \
    } while (0)
100
101/**
102 * Calls a worker function with different worker variable storage types.
103 */
104#define DO_SIMPLE_TEST_NO_SUB_NO_STACK(a_WorkerFunction, type) \
105 do \
106 { \
107 type *pVar = (type *)RTTestGuardedAllocHead(g_hTest, sizeof(type)); \
108 RTTEST_CHECK_BREAK(g_hTest, pVar); \
109 a_WorkerFunction(pVar); \
110 RTTestGuardedFree(g_hTest, pVar); \
111 \
112 pVar = (type *)RTTestGuardedAllocTail(g_hTest, sizeof(type)); \
113 RTTEST_CHECK_BREAK(g_hTest, pVar); \
114 a_WorkerFunction(pVar); \
115 RTTestGuardedFree(g_hTest, pVar); \
116 } while (0)
117
118
119/**
120 * Calls a worker function with different worker variable storage types.
121 */
122#define DO_SIMPLE_TEST_NO_SUB(a_WorkerFunction, type) \
123 do \
124 { \
125 type StackVar; \
126 a_WorkerFunction(&StackVar); \
127 DO_SIMPLE_TEST_NO_SUB_NO_STACK(a_WorkerFunction, type); \
128 } while (0)
129
130/**
131 * Calls a worker function with different worker variable storage types.
132 */
133#define DO_SIMPLE_TEST(name, type) \
134 do \
135 { \
136 RTTestISub(#name); \
137 DO_SIMPLE_TEST_NO_SUB(tst ## name ## Worker, type); \
138 } while (0)
139
140
141/*********************************************************************************************************************************
142* Global Variables *
143*********************************************************************************************************************************/
144/** The test instance. */
145static RTTEST g_hTest;
146
147
148
149#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
150
151const char *getCacheAss(unsigned u)
152{
153 if (u == 0)
154 return "res0 ";
155 if (u == 1)
156 return "direct";
157 if (u >= 256)
158 return "???";
159
160 char *pszRet = NULL;
161 RTStrAPrintf(&pszRet, "%d way", u);
162 RTMEM_WILL_LEAK(pszRet);
163 return pszRet;
164}
165
166
/**
 * Returns a human readable description of an L2 cache associativity
 * encoding (CPUID leaf 0x80000006 style, 4-bit field).
 *
 * @returns Read-only string.
 * @param   u   The raw 4-bit associativity encoding; out-of-range values
 *              yield "????".
 */
const char *getL2CacheAss(unsigned u)
{
    /* Descriptions indexed by the 4-bit encoding. */
    static const char * const s_apszDesc[16] =
    {
        "off ",   "direct", "2 way ", "res3 ",
        "4 way ", "res5 ",  "8 way ", "res7 ",
        "16 way", "res9 ",  "res10 ", "res11 ",
        "res12 ", "res13 ", "res14 ", "fully "
    };
    if (u < sizeof(s_apszDesc) / sizeof(s_apszDesc[0]))
        return s_apszDesc[u];
    return "????";
}
191
192
193/**
194 * Test and dump all possible info from the CPUID instruction.
195 *
196 * @remark Bits shared with the libc cpuid.c program. This all written by me, so no worries.
197 * @todo transform the dumping into a generic runtime function. We'll need it for logging!
198 */
/**
 * Test and dump all possible info from the CPUID instruction.
 *
 * Cross-checks the register-specific ASMCpuId_* variants against a full
 * ASMCpuId call for every supported standard and extended leaf, then dumps
 * the raw leaf values and a decoded summary.
 *
 * @remark  Bits shared with the libc cpuid.c program. This all written by me, so no worries.
 * @todo transform the dumping into a generic runtime function. We'll need it for logging!
 */
void tstASMCpuId(void)
{
    RTTestISub("ASMCpuId");

    unsigned iBit;
    struct
    {
        /* Note: member order differs from the EAX..EDX call order; each field
           is passed by address individually, so the order is irrelevant. */
        uint32_t uEBX, uEAX, uEDX, uECX;
    } s;
    if (!ASMHasCpuId())
    {
        RTTestIPrintf(RTTESTLVL_ALWAYS, "warning! CPU doesn't support CPUID\n");
        return;
    }

    /*
     * Try the 0 function and use that for checking the ASMCpuId_* variants.
     */
    ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);

    uint32_t u32;

    u32 = ASMCpuId_EAX(0);
    CHECKVAL(u32, s.uEAX, "%x");
    u32 = ASMCpuId_EBX(0);
    CHECKVAL(u32, s.uEBX, "%x");
    u32 = ASMCpuId_ECX(0);
    CHECKVAL(u32, s.uECX, "%x");
    u32 = ASMCpuId_EDX(0);
    CHECKVAL(u32, s.uEDX, "%x");

    /* Seed the outputs with wrong values so a no-op implementation cannot
       pass by accident. */
    uint32_t uECX2 = s.uECX - 1;
    uint32_t uEDX2 = s.uEDX - 1;
    ASMCpuId_ECX_EDX(0, &uECX2, &uEDX2);
    CHECKVAL(uECX2, s.uECX, "%x");
    CHECKVAL(uEDX2, s.uEDX, "%x");

    uint32_t uEAX2 = s.uEAX - 1;
    uint32_t uEBX2 = s.uEBX - 1;
    uECX2 = s.uECX - 1;
    uEDX2 = s.uEDX - 1;
    ASMCpuIdExSlow(0, 0, 0, 0, &uEAX2, &uEBX2, &uECX2, &uEDX2);
    CHECKVAL(uEAX2, s.uEAX, "%x");
    CHECKVAL(uEBX2, s.uEBX, "%x");
    CHECKVAL(uECX2, s.uECX, "%x");
    CHECKVAL(uEDX2, s.uEDX, "%x");

    /*
     * Check the extended APIC stuff.
     */
    uint32_t idExtApic;
    if (ASMCpuId_EAX(0) >= 0xb)
    {
        /* Retry loop: discard the sample if we got rescheduled onto another
           CPU between the two plain APIC ID reads. */
        uint8_t idApic = ASMGetApicId();
        do
        {
            uEAX2 = uEBX2 = uECX2 = uEDX2 = UINT32_C(0x50486744);
            ASMCpuIdExSlow(0xb, 0, 0, 0, &uEAX2, &uEBX2, &uECX2, &uEDX2);
            idExtApic = ASMGetApicIdExt0B();
        } while (ASMGetApicId() != idApic);

        CHECKVAL(uEDX2, idExtApic, "%x");
        if (idApic != (uint8_t)idExtApic && uECX2 != 0)
            RTTestIFailed("ASMGetApicIdExt0B() -> %#x vs ASMGetApicId() -> %#x", idExtApic, idApic);
    }
    if (ASMCpuId_EAX(UINT32_C(0x80000000)) >= UINT32_C(0x8000001E))
    {
        /* Same retry pattern for the AMD extended topology leaf. */
        uint8_t idApic = ASMGetApicId();
        do
        {
            uEAX2 = uEBX2 = uECX2 = uEDX2 = UINT32_C(0x50486744);
            ASMCpuIdExSlow(0x8000001e, 0, 0, 0, &uEAX2, &uEBX2, &uECX2, &uEDX2);
            idExtApic = ASMGetApicIdExt8000001E();
        } while (ASMGetApicId() != idApic);
        CHECKVAL(uEAX2, idExtApic, "%x");
        if (idApic != (uint8_t)idExtApic)
            RTTestIFailed("ASMGetApicIdExt8000001E() -> %#x vs ASMGetApicId() -> %#x", idExtApic, idApic);
    }

    /*
     * Done testing, dump the information.
     */
    RTTestIPrintf(RTTESTLVL_ALWAYS, "CPUID Dump\n");
    ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
    const uint32_t cFunctions = s.uEAX;

    /* raw dump (goes 3 leaves past the advertised maximum, marked with '*') */
    RTTestIPrintf(RTTESTLVL_ALWAYS,
                  "\n"
                  " RAW Standard CPUIDs\n"
                  "Function eax ebx ecx edx\n");
    for (unsigned iStd = 0; iStd <= cFunctions + 3; iStd++)
    {
        ASMCpuId_Idx_ECX(iStd, 0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTTestIPrintf(RTTESTLVL_ALWAYS, "%08x %08x %08x %08x %08x%s\n",
                      iStd, s.uEAX, s.uEBX, s.uECX, s.uEDX, iStd <= cFunctions ? "" : "*");

        /* Some leafs output depend on the initial value of ECX.
         * The same seems to apply to invalid standard functions */
        if (iStd > cFunctions)
            continue;
        if (iStd == 0x04) /* Deterministic Cache Parameters Leaf */
            for (uint32_t uECX = 1; s.uEAX & 0x1f; uECX++)
            {
                ASMCpuId_Idx_ECX(iStd, uECX, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
                RTTestIPrintf(RTTESTLVL_ALWAYS, " [%02x] %08x %08x %08x %08x\n", uECX, s.uEAX, s.uEBX, s.uECX, s.uEDX);
                RTTESTI_CHECK_BREAK(uECX < 128); /* sanity cap on sub-leaf count */
            }
        else if (iStd == 0x07) /* Structured Extended Feature Flags */
        {
            uint32_t uMax = s.uEAX; /* sub-leaf 0 EAX = number of sub-leaves */
            for (uint32_t uECX = 1; uECX < uMax; uECX++)
            {
                ASMCpuId_Idx_ECX(iStd, uECX, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
                RTTestIPrintf(RTTESTLVL_ALWAYS, " [%02x] %08x %08x %08x %08x\n", uECX, s.uEAX, s.uEBX, s.uECX, s.uEDX);
                RTTESTI_CHECK_BREAK(uECX < 128);
            }
        }
        else if (iStd == 0x0b) /* Extended Topology Enumeration Leafs */
            for (uint32_t uECX = 1; (s.uEAX & 0x1f) && (s.uEBX & 0xffff); uECX++)
            {
                ASMCpuId_Idx_ECX(iStd, uECX, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
                RTTestIPrintf(RTTESTLVL_ALWAYS, " [%02x] %08x %08x %08x %08x\n", uECX, s.uEAX, s.uEBX, s.uECX, s.uEDX);
                RTTESTI_CHECK_BREAK(uECX < 128);
            }
        else if (iStd == 0x0d) /* Extended State Enumeration Leafs */
            for (uint32_t uECX = 1; s.uEAX != 0 || s.uEBX != 0 || s.uECX != 0 || s.uEDX != 0; uECX++)
            {
                ASMCpuId_Idx_ECX(iStd, uECX, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
                RTTestIPrintf(RTTESTLVL_ALWAYS, " [%02x] %08x %08x %08x %08x\n", uECX, s.uEAX, s.uEBX, s.uECX, s.uEDX);
                RTTESTI_CHECK_BREAK(uECX < 128);
            }
        else if (   iStd == 0x0f /* Platform quality of service monitoring (PQM) */
                 || iStd == 0x10 /* Platform quality of service enforcement (PQE) */
                 || iStd == 0x12 /* SGX Enumeration */
                 || iStd == 0x14 /* Processor Trace Enumeration */
                 || iStd == 0x17 /* SoC Vendor Attribute Enumeration */
                 || iStd == 0x18 /* Deterministic Address Translation Parameters */)
        {
            /** @todo */
        }
        else
        {
            /* Cross-check every single-register variant against the full read. */
            u32 = ASMCpuId_EAX(iStd);
            CHECKVAL(u32, s.uEAX, "%x");

            uint32_t u32EbxMask = UINT32_MAX;
            if (iStd == 1)
                u32EbxMask = UINT32_C(0x00ffffff); /* Omit the local apic ID in case we're rescheduled. */
            u32 = ASMCpuId_EBX(iStd);
            CHECKVAL(u32 & u32EbxMask, s.uEBX & u32EbxMask, "%x");

            u32 = ASMCpuId_ECX(iStd);
            CHECKVAL(u32, s.uECX, "%x");
            u32 = ASMCpuId_EDX(iStd);
            CHECKVAL(u32, s.uEDX, "%x");

            uECX2 = s.uECX - 1;
            uEDX2 = s.uEDX - 1;
            ASMCpuId_ECX_EDX(iStd, &uECX2, &uEDX2);
            CHECKVAL(uECX2, s.uECX, "%x");
            CHECKVAL(uEDX2, s.uEDX, "%x");

            uEAX2 = s.uEAX - 1;
            uEBX2 = s.uEBX - 1;
            uECX2 = s.uECX - 1;
            uEDX2 = s.uEDX - 1;
            ASMCpuId(iStd, &uEAX2, &uEBX2, &uECX2, &uEDX2);
            CHECKVAL(uEAX2, s.uEAX, "%x");
            CHECKVAL(uEBX2 & u32EbxMask, s.uEBX & u32EbxMask, "%x");
            CHECKVAL(uECX2, s.uECX, "%x");
            CHECKVAL(uEDX2, s.uEDX, "%x");
        }
    }

    /*
     * Understandable output
     */
    ASMCpuId(0, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
    /* Vendor string is the 12 chars in EBX,EDX,ECX (in that order). */
    RTTestIPrintf(RTTESTLVL_ALWAYS,
                  "Name: %.04s%.04s%.04s\n"
                  "Support: 0-%u\n",
                  &s.uEBX, &s.uEDX, &s.uECX, s.uEAX);
    bool const fIntel = ASMIsIntelCpuEx(s.uEBX, s.uECX, s.uEDX);

    /*
     * Get Features.
     */
    if (cFunctions >= 1)
    {
        static const char * const s_apszTypes[4] = { "primary", "overdrive", "MP", "reserved" };
        ASMCpuId(1, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "Family: %#x \tExtended: %#x \tEffective: %#x\n"
                      "Model: %#x \tExtended: %#x \tEffective: %#x\n"
                      "Stepping: %d\n"
                      "Type: %d (%s)\n"
                      "APIC ID: %#04x\n"
                      "Logical CPUs: %d\n"
                      "CLFLUSH Size: %d\n"
                      "Brand ID: %#04x\n",
                      (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ASMGetCpuFamily(s.uEAX),
                      (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ASMGetCpuModel(s.uEAX, fIntel),
                      ASMGetCpuStepping(s.uEAX),
                      (s.uEAX >> 12) & 0x3, s_apszTypes[(s.uEAX >> 12) & 0x3],
                      (s.uEBX >> 24) & 0xff,
                      (s.uEBX >> 16) & 0xff,
                      (s.uEBX >> 8) & 0xff,
                      (s.uEBX >> 0) & 0xff);

        /* Leaf 1 EDX feature flags; unnamed bits printed as their number. */
        RTTestIPrintf(RTTESTLVL_ALWAYS, "Features EDX: ");
        if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FPU");
        if (s.uEDX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " VME");
        if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DE");
        if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE");
        if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TSC");
        if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MSR");
        if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAE");
        if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCE");
        if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CX8");
        if (s.uEDX & RT_BIT(9)) RTTestIPrintf(RTTESTLVL_ALWAYS, " APIC");
        if (s.uEDX & RT_BIT(10)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 10");
        if (s.uEDX & RT_BIT(11)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SEP");
        if (s.uEDX & RT_BIT(12)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MTRR");
        if (s.uEDX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PGE");
        if (s.uEDX & RT_BIT(14)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCA");
        if (s.uEDX & RT_BIT(15)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMOV");
        if (s.uEDX & RT_BIT(16)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAT");
        if (s.uEDX & RT_BIT(17)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE36");
        if (s.uEDX & RT_BIT(18)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSN");
        if (s.uEDX & RT_BIT(19)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CLFSH");
        if (s.uEDX & RT_BIT(20)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 20");
        if (s.uEDX & RT_BIT(21)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DS");
        if (s.uEDX & RT_BIT(22)) RTTestIPrintf(RTTESTLVL_ALWAYS, " ACPI");
        if (s.uEDX & RT_BIT(23)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MMX");
        if (s.uEDX & RT_BIT(24)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FXSR");
        if (s.uEDX & RT_BIT(25)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE");
        if (s.uEDX & RT_BIT(26)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE2");
        if (s.uEDX & RT_BIT(27)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SS");
        if (s.uEDX & RT_BIT(28)) RTTestIPrintf(RTTESTLVL_ALWAYS, " HTT");
        if (s.uEDX & RT_BIT(29)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 29");
        if (s.uEDX & RT_BIT(30)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 30");
        if (s.uEDX & RT_BIT(31)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 31");
        RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");

        /** @todo check intel docs. */
        RTTestIPrintf(RTTESTLVL_ALWAYS, "Features ECX: ");
        if (s.uECX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SSE3");
        for (iBit = 1; iBit < 13; iBit++)
            if (s.uECX & RT_BIT(iBit))
                RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
        if (s.uECX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CX16");
        for (iBit = 14; iBit < 32; iBit++)
            if (s.uECX & RT_BIT(iBit))
                RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
        RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
    }
    if (ASMCpuId_EAX(0) >= 0xb)
        RTTestIPrintf(RTTESTLVL_ALWAYS, "APIC ID(Ext 0b): %#010x\n", ASMGetApicIdExt0B());

    /*
     * Extended.
     * Implemented after AMD specs.
     */
    /** @todo check out the intel specs. */
    ASMCpuId(0x80000000, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
    if (!s.uEAX && !s.uEBX && !s.uECX && !s.uEDX)
    {
        RTTestIPrintf(RTTESTLVL_ALWAYS, "No extended CPUID info? Check the manual on how to detect this...\n");
        return;
    }
    /* Ensure the max-leaf value carries the 0x80000000 range bit. */
    const uint32_t cExtFunctions = s.uEAX | 0x80000000;

    /* raw dump */
    RTTestIPrintf(RTTESTLVL_ALWAYS,
                  "\n"
                  " RAW Extended CPUIDs\n"
                  "Function eax ebx ecx edx\n");
    for (unsigned iExt = 0x80000000; iExt <= cExtFunctions + 3; iExt++)
    {
        ASMCpuId(iExt, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTTestIPrintf(RTTESTLVL_ALWAYS, "%08x %08x %08x %08x %08x%s\n",
                      iExt, s.uEAX, s.uEBX, s.uECX, s.uEDX, iExt <= cExtFunctions ? "" : "*");

        if (iExt > cExtFunctions)
            continue; /* Invalid extended functions seems change the value if ECX changes */
        if (iExt == 0x8000001d)
            continue; /* Takes cache level in ecx. */

        u32 = ASMCpuId_EAX(iExt);
        CHECKVAL(u32, s.uEAX, "%x");
        u32 = ASMCpuId_EBX(iExt);
        CHECKVAL(u32, s.uEBX, "%x");
        u32 = ASMCpuId_ECX(iExt);
        CHECKVAL(u32, s.uECX, "%x");
        u32 = ASMCpuId_EDX(iExt);
        CHECKVAL(u32, s.uEDX, "%x");

        uECX2 = s.uECX - 1;
        uEDX2 = s.uEDX - 1;
        ASMCpuId_ECX_EDX(iExt, &uECX2, &uEDX2);
        CHECKVAL(uECX2, s.uECX, "%x");
        CHECKVAL(uEDX2, s.uEDX, "%x");

        uEAX2 = s.uEAX - 1;
        uEBX2 = s.uEBX - 1;
        uECX2 = s.uECX - 1;
        uEDX2 = s.uEDX - 1;
        ASMCpuId(iExt, &uEAX2, &uEBX2, &uECX2, &uEDX2);
        CHECKVAL(uEAX2, s.uEAX, "%x");
        CHECKVAL(uEBX2, s.uEBX, "%x");
        CHECKVAL(uECX2, s.uECX, "%x");
        CHECKVAL(uEDX2, s.uEDX, "%x");
    }

    /*
     * Understandable output
     */
    ASMCpuId(0x80000000, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
    RTTestIPrintf(RTTESTLVL_ALWAYS,
                  "Ext Name: %.4s%.4s%.4s\n"
                  "Ext Supports: 0x80000000-%#010x\n",
                  &s.uEBX, &s.uEDX, &s.uECX, s.uEAX);

    if (cExtFunctions >= 0x80000001)
    {
        ASMCpuId(0x80000001, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "Family: %#x \tExtended: %#x \tEffective: %#x\n"
                      "Model: %#x \tExtended: %#x \tEffective: %#x\n"
                      "Stepping: %d\n"
                      "Brand ID: %#05x\n",
                      (s.uEAX >> 8) & 0xf, (s.uEAX >> 20) & 0x7f, ASMGetCpuFamily(s.uEAX),
                      (s.uEAX >> 4) & 0xf, (s.uEAX >> 16) & 0x0f, ASMGetCpuModel(s.uEAX, fIntel),
                      ASMGetCpuStepping(s.uEAX),
                      s.uEBX & 0xfff);

        /* Extended leaf 0x80000001 EDX feature flags (AMD naming). */
        RTTestIPrintf(RTTESTLVL_ALWAYS, "Features EDX: ");
        if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FPU");
        if (s.uEDX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " VME");
        if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " DE");
        if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE");
        if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TSC");
        if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MSR");
        if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAE");
        if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCE");
        if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMPXCHG8B");
        if (s.uEDX & RT_BIT(9)) RTTestIPrintf(RTTESTLVL_ALWAYS, " APIC");
        if (s.uEDX & RT_BIT(10)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 10");
        if (s.uEDX & RT_BIT(11)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SysCallSysRet");
        if (s.uEDX & RT_BIT(12)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MTRR");
        if (s.uEDX & RT_BIT(13)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PGE");
        if (s.uEDX & RT_BIT(14)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MCA");
        if (s.uEDX & RT_BIT(15)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CMOV");
        if (s.uEDX & RT_BIT(16)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PAT");
        if (s.uEDX & RT_BIT(17)) RTTestIPrintf(RTTESTLVL_ALWAYS, " PSE36");
        if (s.uEDX & RT_BIT(18)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 18");
        if (s.uEDX & RT_BIT(19)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 19");
        if (s.uEDX & RT_BIT(20)) RTTestIPrintf(RTTESTLVL_ALWAYS, " NX");
        if (s.uEDX & RT_BIT(21)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 21");
        if (s.uEDX & RT_BIT(22)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MmxExt");
        if (s.uEDX & RT_BIT(23)) RTTestIPrintf(RTTESTLVL_ALWAYS, " MMX");
        if (s.uEDX & RT_BIT(24)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FXSR");
        if (s.uEDX & RT_BIT(25)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FastFXSR");
        if (s.uEDX & RT_BIT(26)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 26");
        if (s.uEDX & RT_BIT(27)) RTTestIPrintf(RTTESTLVL_ALWAYS, " RDTSCP");
        if (s.uEDX & RT_BIT(28)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 28");
        if (s.uEDX & RT_BIT(29)) RTTestIPrintf(RTTESTLVL_ALWAYS, " LongMode");
        if (s.uEDX & RT_BIT(30)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3DNowExt");
        if (s.uEDX & RT_BIT(31)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3DNow");
        RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");

        RTTestIPrintf(RTTESTLVL_ALWAYS, "Features ECX: ");
        if (s.uECX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " LahfSahf");
        if (s.uECX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " CmpLegacy");
        if (s.uECX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " SVM");
        if (s.uECX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 3");
        if (s.uECX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " AltMovCr8");
        for (iBit = 5; iBit < 32; iBit++)
            if (s.uECX & RT_BIT(iBit))
                RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
        RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
    }

    /* Processor brand string: 3 leaves x 4 registers x 4 chars + terminator. */
    char szString[4*4*3+1] = {0};
    if (cExtFunctions >= 0x80000002)
        ASMCpuId(0x80000002, &szString[0 + 0], &szString[0 + 4], &szString[0 + 8], &szString[0 + 12]);
    if (cExtFunctions >= 0x80000003)
        ASMCpuId(0x80000003, &szString[16 + 0], &szString[16 + 4], &szString[16 + 8], &szString[16 + 12]);
    if (cExtFunctions >= 0x80000004)
        ASMCpuId(0x80000004, &szString[32 + 0], &szString[32 + 4], &szString[32 + 8], &szString[32 + 12]);
    if (cExtFunctions >= 0x80000002)
        RTTestIPrintf(RTTESTLVL_ALWAYS, "Full Name: %s\n", szString);

    if (cExtFunctions >= 0x80000005)
    {
        /* L1 cache and TLB info (AMD layout). */
        ASMCpuId(0x80000005, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "TLB 2/4M Instr/Uni: %s %3d entries\n"
                      "TLB 2/4M Data: %s %3d entries\n",
                      getCacheAss((s.uEAX >> 8) & 0xff), (s.uEAX >> 0) & 0xff,
                      getCacheAss((s.uEAX >> 24) & 0xff), (s.uEAX >> 16) & 0xff);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "TLB 4K Instr/Uni: %s %3d entries\n"
                      "TLB 4K Data: %s %3d entries\n",
                      getCacheAss((s.uEBX >> 8) & 0xff), (s.uEBX >> 0) & 0xff,
                      getCacheAss((s.uEBX >> 24) & 0xff), (s.uEBX >> 16) & 0xff);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "L1 Instr Cache Line Size: %d bytes\n"
                      "L1 Instr Cache Lines Per Tag: %d\n"
                      "L1 Instr Cache Associativity: %s\n"
                      "L1 Instr Cache Size: %d KB\n",
                      (s.uEDX >> 0) & 0xff,
                      (s.uEDX >> 8) & 0xff,
                      getCacheAss((s.uEDX >> 16) & 0xff),
                      (s.uEDX >> 24) & 0xff);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "L1 Data Cache Line Size: %d bytes\n"
                      "L1 Data Cache Lines Per Tag: %d\n"
                      "L1 Data Cache Associativity: %s\n"
                      "L1 Data Cache Size: %d KB\n",
                      (s.uECX >> 0) & 0xff,
                      (s.uECX >> 8) & 0xff,
                      getCacheAss((s.uECX >> 16) & 0xff),
                      (s.uECX >> 24) & 0xff);
    }

    if (cExtFunctions >= 0x80000006)
    {
        /* L2 cache and TLB info (AMD layout, 4-bit associativity fields). */
        ASMCpuId(0x80000006, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "L2 TLB 2/4M Instr/Uni: %s %4d entries\n"
                      "L2 TLB 2/4M Data: %s %4d entries\n",
                      getL2CacheAss((s.uEAX >> 12) & 0xf), (s.uEAX >> 0) & 0xfff,
                      getL2CacheAss((s.uEAX >> 28) & 0xf), (s.uEAX >> 16) & 0xfff);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "L2 TLB 4K Instr/Uni: %s %4d entries\n"
                      "L2 TLB 4K Data: %s %4d entries\n",
                      getL2CacheAss((s.uEBX >> 12) & 0xf), (s.uEBX >> 0) & 0xfff,
                      getL2CacheAss((s.uEBX >> 28) & 0xf), (s.uEBX >> 16) & 0xfff);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "L2 Cache Line Size: %d bytes\n"
                      "L2 Cache Lines Per Tag: %d\n"
                      "L2 Cache Associativity: %s\n"
                      "L2 Cache Size: %d KB\n",
                      (s.uEDX >> 0) & 0xff,
                      (s.uEDX >> 8) & 0xf,
                      getL2CacheAss((s.uEDX >> 12) & 0xf),
                      (s.uEDX >> 16) & 0xffff);
    }

    if (cExtFunctions >= 0x80000007)
    {
        /* Advanced power management feature flags. */
        ASMCpuId(0x80000007, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTTestIPrintf(RTTESTLVL_ALWAYS, "APM Features: ");
        if (s.uEDX & RT_BIT(0)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TS");
        if (s.uEDX & RT_BIT(1)) RTTestIPrintf(RTTESTLVL_ALWAYS, " FID");
        if (s.uEDX & RT_BIT(2)) RTTestIPrintf(RTTESTLVL_ALWAYS, " VID");
        if (s.uEDX & RT_BIT(3)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TTP");
        if (s.uEDX & RT_BIT(4)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TM");
        if (s.uEDX & RT_BIT(5)) RTTestIPrintf(RTTESTLVL_ALWAYS, " STC");
        if (s.uEDX & RT_BIT(6)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 6");
        if (s.uEDX & RT_BIT(7)) RTTestIPrintf(RTTESTLVL_ALWAYS, " 7");
        if (s.uEDX & RT_BIT(8)) RTTestIPrintf(RTTESTLVL_ALWAYS, " TscInvariant");
        for (iBit = 9; iBit < 32; iBit++)
            if (s.uEDX & RT_BIT(iBit))
                RTTestIPrintf(RTTESTLVL_ALWAYS, " %d", iBit);
        RTTestIPrintf(RTTESTLVL_ALWAYS, "\n");
    }

    if (cExtFunctions >= 0x80000008)
    {
        /* Address widths and core count. */
        ASMCpuId(0x80000008, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "Physical Address Width: %d bits\n"
                      "Virtual Address Width: %d bits\n"
                      "Guest Physical Address Width: %d bits\n",
                      (s.uEAX >> 0) & 0xff,
                      (s.uEAX >> 8) & 0xff,
                      (s.uEAX >> 16) & 0xff);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "Physical Core Count: %d\n",
                      ((s.uECX >> 0) & 0xff) + 1);
        if ((s.uECX >> 12) & 0xf)
            RTTestIPrintf(RTTESTLVL_ALWAYS, "ApicIdCoreIdSize: %d bits\n", (s.uECX >> 12) & 0xf);
    }

    if (cExtFunctions >= 0x8000000a)
    {
        /* SVM (AMD-V) capabilities. */
        ASMCpuId(0x8000000a, &s.uEAX, &s.uEBX, &s.uECX, &s.uEDX);
        RTTestIPrintf(RTTESTLVL_ALWAYS,
                      "SVM Revision: %d (%#x)\n"
                      "Number of Address Space IDs: %d (%#x)\n",
                      s.uEAX & 0xff, s.uEAX & 0xff,
                      s.uEBX, s.uEBX);
    }
    if (ASMCpuId_EAX(UINT32_C(0x80000000)) >= UINT32_C(0x8000001E))
        /* NOTE(review): the label says "8000001b" but the value queried is the
           0x8000001E leaf - looks like a typo in the message; confirm before changing. */
        RTTestIPrintf(RTTESTLVL_ALWAYS, "APIC ID(Ext 8000001b): %#010x\n", ASMGetApicIdExt8000001E());
}
698
699# if 0
/**
 * Brute-force walk over every possible 32-bit CPUID leaf, printing a line
 * whenever the returned registers change relative to the previous leaf.
 * Currently disabled (#if 0) - diagnostic tool, takes a very long time.
 */
static void bruteForceCpuId(void)
{
    RTTestISub("brute force CPUID leafs");
    uint32_t auPrevValues[4] = { 0, 0, 0, 0};
    uint32_t uLeaf = 0;
    do
    {
        uint32_t auValues[4];
        ASMCpuIdExSlow(uLeaf, 0, 0, 0, &auValues[0], &auValues[1], &auValues[2], &auValues[3]);
        /* Print when any register differs from the previous leaf's output
           (ignoring the common "EAX echoes the leaf, rest zero" pattern),
           or every 2^27 leaves as a progress marker. */
        if (   (auValues[0] != auPrevValues[0] && auValues[0] != uLeaf)
            || (auValues[1] != auPrevValues[1] && auValues[1] != 0)
            || (auValues[2] != auPrevValues[2] && auValues[2] != 0)
            || (auValues[3] != auPrevValues[3] && auValues[3] != 0)
            || (uLeaf & (UINT32_C(0x08000000) - UINT32_C(1))) == 0)
        {
            RTTestIPrintf(RTTESTLVL_ALWAYS,
                          "%08x: %08x %08x %08x %08x\n", uLeaf,
                          auValues[0], auValues[1], auValues[2], auValues[3]);
        }
        auPrevValues[0] = auValues[0];
        auPrevValues[1] = auValues[1];
        auPrevValues[2] = auValues[2];
        auPrevValues[3] = auValues[3];

        //uint32_t uSubLeaf = 0;
        //do
        //{
        //
        //
        //} while (false);
    } while (uLeaf++ < UINT32_MAX); /* post-increment: the body also runs for uLeaf == UINT32_MAX, then the loop exits. */
}
732# endif
733
734#endif /* AMD64 || X86 */
735
/** Stores @a a_Val into *a_pVar, checks that @a a_Function reads that exact
 * value back (as @a a_Type, formatted with @a a_Fmt), and verifies the
 * variable itself is unchanged afterwards. */
#define TEST_READ(a_pVar, a_Type, a_Fmt, a_Function, a_Val) \
    do { *a_pVar = a_Val; CHECKOP(a_Function(a_pVar), a_Val, a_Fmt, a_Type); CHECKVAL(*a_pVar, a_Val, a_Fmt); } while (0)
738
739DECLINLINE(void) tstASMAtomicReadU8Worker(uint8_t volatile *pu8)
740{
741 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 0);
742 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 1);
743 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 2);
744 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 16);
745 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 32);
746 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 32);
747 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 127);
748 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 128);
749 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 169);
750 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 239);
751 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 254);
752 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicReadU8, 255);
753
754 int8_t volatile *pi8 = (int8_t volatile *)pu8;
755 TEST_READ(pi8, uint8_t, "%d", ASMAtomicReadS8, INT8_MAX);
756 TEST_READ(pi8, uint8_t, "%d", ASMAtomicReadS8, INT8_MIN);
757 TEST_READ(pi8, uint8_t, "%d", ASMAtomicReadS8, 42);
758 TEST_READ(pi8, uint8_t, "%d", ASMAtomicReadS8, -21);
759
760 bool volatile *pf = (bool volatile *)pu8;
761 TEST_READ(pf, bool, "%d", ASMAtomicReadBool, true);
762 TEST_READ(pf, bool, "%d", ASMAtomicReadBool, false);
763}
764
765
766DECLINLINE(void) tstASMAtomicUoReadU8Worker(uint8_t volatile *pu8)
767{
768 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 0);
769 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 1);
770 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 2);
771 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 16);
772 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 32);
773 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 32);
774 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 127);
775 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 128);
776 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 169);
777 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 239);
778 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 254);
779 TEST_READ(pu8, uint8_t, "%#x", ASMAtomicUoReadU8, 255);
780
781 int8_t volatile *pi8 = (int8_t volatile *)pu8;
782 TEST_READ(pi8, uint8_t, "%d", ASMAtomicUoReadS8, INT8_MAX);
783 TEST_READ(pi8, uint8_t, "%d", ASMAtomicUoReadS8, INT8_MIN);
784 TEST_READ(pi8, uint8_t, "%d", ASMAtomicUoReadS8, 42);
785 TEST_READ(pi8, uint8_t, "%d", ASMAtomicUoReadS8, -21);
786
787 bool volatile *pf = (bool volatile *)pu8;
788 TEST_READ(pf, bool, "%d", ASMAtomicUoReadBool, true);
789 TEST_READ(pf, bool, "%d", ASMAtomicUoReadBool, false);
790}
791
792
793DECLINLINE(void) tstASMAtomicReadU16Worker(uint16_t volatile *pu16)
794{
795 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicReadU16, 0);
796 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicReadU16, 19983);
797 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicReadU16, INT16_MAX);
798 TEST_READ(pu16, uint16_t, "%#x", ASMAtomicReadU16, UINT16_MAX);
799
800 int16_t volatile *pi16 = (int16_t volatile *)pu16;
801 TEST_READ(pi16, uint16_t, "%d", ASMAtomicReadS16, INT16_MAX);
802 TEST_READ(pi16, uint16_t, "%d", ASMAtomicReadS16, INT16_MIN);
803 TEST_READ(pi16, uint16_t, "%d", ASMAtomicReadS16, 42);
804 TEST_READ(pi16, uint16_t, "%d", ASMAtomicReadS16, -21);
805}
806
807
/**
 * Exercises the unordered read variants ASMAtomicUoReadU16/S16 against a
 * single 16-bit variable (same value set as the ordered worker above).
 *
 * @param   pu16    The variable to test against.
 */
DECLINLINE(void) tstASMAtomicUoReadU16Worker(uint16_t volatile *pu16)
{
    TEST_READ(pu16, uint16_t, "%#x", ASMAtomicUoReadU16, 0);
    TEST_READ(pu16, uint16_t, "%#x", ASMAtomicUoReadU16, 19983);
    TEST_READ(pu16, uint16_t, "%#x", ASMAtomicUoReadU16, INT16_MAX);
    TEST_READ(pu16, uint16_t, "%#x", ASMAtomicUoReadU16, UINT16_MAX);

    /* Signed reads through the same storage (aliased pointer). */
    int16_t volatile *pi16 = (int16_t volatile *)pu16;
    TEST_READ(pi16, uint16_t, "%d", ASMAtomicUoReadS16, INT16_MAX);
    TEST_READ(pi16, uint16_t, "%d", ASMAtomicUoReadS16, INT16_MIN);
    TEST_READ(pi16, uint16_t, "%d", ASMAtomicUoReadS16, 42);
    TEST_READ(pi16, uint16_t, "%d", ASMAtomicUoReadS16, -21);
}
821
822
823DECLINLINE(void) tstASMAtomicReadU32Worker(uint32_t volatile *pu32)
824{
825 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, 0);
826 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, 19983);
827 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, INT16_MAX);
828 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, UINT16_MAX);
829 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, _1M-1);
830 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, _1M+1);
831 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, _1G-1);
832 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, _1G+1);
833 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, INT32_MAX);
834 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicReadU32, UINT32_MAX);
835
836 int32_t volatile *pi32 = (int32_t volatile *)pu32;
837 TEST_READ(pi32, uint32_t, "%d", ASMAtomicReadS32, INT32_MAX);
838 TEST_READ(pi32, uint32_t, "%d", ASMAtomicReadS32, INT32_MIN);
839 TEST_READ(pi32, uint32_t, "%d", ASMAtomicReadS32, 42);
840 TEST_READ(pi32, uint32_t, "%d", ASMAtomicReadS32, -21);
841
842#if ARCH_BITS == 32
843 size_t volatile *pcb = (size_t volatile *)pu32;
844 TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, 0);
845 TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, ~(size_t)2);
846 TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, ~(size_t)0 / 4);
847
848 void * volatile *ppv = (void * volatile *)pu32;
849 TEST_READ(ppv, void *, "%p", ASMAtomicReadPtr, NULL);
850 TEST_READ(ppv, void *, "%p", ASMAtomicReadPtr, (void *)~(uintptr_t)42);
851
852 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
853 RTSEMEVENT hEvt = ASMAtomicReadPtrT(phEvt, RTSEMEVENT);
854 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
855
856 ASMAtomicReadHandle(phEvt, &hEvt);
857 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
858#endif
859}
860
861
862DECLINLINE(void) tstASMAtomicUoReadU32Worker(uint32_t volatile *pu32)
863{
864 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, 0);
865 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, 19983);
866 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, INT16_MAX);
867 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, UINT16_MAX);
868 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, _1M-1);
869 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, _1M+1);
870 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, _1G-1);
871 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, _1G+1);
872 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, INT32_MAX);
873 TEST_READ(pu32, uint32_t, "%#x", ASMAtomicUoReadU32, UINT32_MAX);
874
875 int32_t volatile *pi32 = (int32_t volatile *)pu32;
876 TEST_READ(pi32, uint32_t, "%d", ASMAtomicUoReadS32, INT32_MAX);
877 TEST_READ(pi32, uint32_t, "%d", ASMAtomicUoReadS32, INT32_MIN);
878 TEST_READ(pi32, uint32_t, "%d", ASMAtomicUoReadS32, 42);
879 TEST_READ(pi32, uint32_t, "%d", ASMAtomicUoReadS32, -21);
880
881#if ARCH_BITS == 32
882 size_t volatile *pcb = (size_t volatile *)pu32;
883 TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, 0);
884 TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, ~(size_t)2);
885 TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, ~(size_t)0 / 4);
886
887 void * volatile *ppv = (void * volatile *)pu32;
888 TEST_READ(ppv, void *, "%p", ASMAtomicUoReadPtr, NULL);
889 TEST_READ(ppv, void *, "%p", ASMAtomicUoReadPtr, (void *)~(uintptr_t)42);
890
891 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
892 RTSEMEVENT hEvt = ASMAtomicUoReadPtrT(phEvt, RTSEMEVENT);
893 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
894
895 ASMAtomicUoReadHandle(phEvt, &hEvt);
896 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
897#endif
898}
899
900
901DECLINLINE(void) tstASMAtomicReadU64Worker(uint64_t volatile *pu64)
902{
903 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, 0);
904 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, 19983);
905 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, INT16_MAX);
906 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, UINT16_MAX);
907 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, _1M-1);
908 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, _1M+1);
909 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, _1G-1);
910 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, _1G+1);
911 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, INT32_MAX);
912 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, UINT32_MAX);
913 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, INT64_MAX);
914 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, UINT64_MAX);
915 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicReadU64, UINT64_C(0x450872549687134));
916
917 int64_t volatile *pi64 = (int64_t volatile *)pu64;
918 TEST_READ(pi64, uint64_t, "%d", ASMAtomicReadS64, INT64_MAX);
919 TEST_READ(pi64, uint64_t, "%d", ASMAtomicReadS64, INT64_MIN);
920 TEST_READ(pi64, uint64_t, "%d", ASMAtomicReadS64, 42);
921 TEST_READ(pi64, uint64_t, "%d", ASMAtomicReadS64, -21);
922
923#if ARCH_BITS == 64
924 size_t volatile *pcb = (size_t volatile *)pu64;
925 TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, 0);
926 TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, ~(size_t)2);
927 TEST_READ(pcb, size_t, "%#llz", ASMAtomicReadZ, ~(size_t)0 / 4);
928
929 void * volatile *ppv = (void * volatile *)pu64;
930 TEST_READ(ppv, void *, "%p", ASMAtomicReadPtr, NULL);
931 TEST_READ(ppv, void *, "%p", ASMAtomicReadPtr, (void *)~(uintptr_t)42);
932
933 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
934 RTSEMEVENT hEvt = ASMAtomicReadPtrT(phEvt, RTSEMEVENT);
935 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
936
937 ASMAtomicReadHandle(phEvt, &hEvt);
938 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
939#endif
940}
941
942
943DECLINLINE(void) tstASMAtomicUoReadU64Worker(uint64_t volatile *pu64)
944{
945 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, 0);
946 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, 19983);
947 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, INT16_MAX);
948 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, UINT16_MAX);
949 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, _1M-1);
950 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, _1M+1);
951 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, _1G-1);
952 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, _1G+1);
953 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, INT32_MAX);
954 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, UINT32_MAX);
955 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, INT64_MAX);
956 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, UINT64_MAX);
957 TEST_READ(pu64, uint64_t, "%#llx", ASMAtomicUoReadU64, UINT64_C(0x450872549687134));
958
959 int64_t volatile *pi64 = (int64_t volatile *)pu64;
960 TEST_READ(pi64, uint64_t, "%d", ASMAtomicUoReadS64, INT64_MAX);
961 TEST_READ(pi64, uint64_t, "%d", ASMAtomicUoReadS64, INT64_MIN);
962 TEST_READ(pi64, uint64_t, "%d", ASMAtomicUoReadS64, 42);
963 TEST_READ(pi64, uint64_t, "%d", ASMAtomicUoReadS64, -21);
964
965#if ARCH_BITS == 64
966 size_t volatile *pcb = (size_t volatile *)pu64;
967 TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, 0);
968 TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, ~(size_t)2);
969 TEST_READ(pcb, size_t, "%#llz", ASMAtomicUoReadZ, ~(size_t)0 / 4);
970
971 void * volatile *ppv = (void * volatile *)pu64;
972 TEST_READ(ppv, void *, "%p", ASMAtomicUoReadPtr, NULL);
973 TEST_READ(ppv, void *, "%p", ASMAtomicUoReadPtr, (void *)~(uintptr_t)42);
974
975 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
976 RTSEMEVENT hEvt = ASMAtomicUoReadPtrT(phEvt, RTSEMEVENT);
977 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
978
979 ASMAtomicUoReadHandle(phEvt, &hEvt);
980 CHECKVAL(hEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
981#endif
982}
983
984
/**
 * Driver for the atomic read tests.  DO_SIMPLE_TEST (defined earlier in the
 * file) presumably dispatches each name to the corresponding
 * tstASM<name>Worker function above — confirm against the macro definition.
 */
static void tstASMAtomicRead(void)
{
    DO_SIMPLE_TEST(ASMAtomicReadU8, uint8_t);
    DO_SIMPLE_TEST(ASMAtomicUoReadU8, uint8_t);

    DO_SIMPLE_TEST(ASMAtomicReadU16, uint16_t);
    DO_SIMPLE_TEST(ASMAtomicUoReadU16, uint16_t);

    DO_SIMPLE_TEST(ASMAtomicReadU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicUoReadU32, uint32_t);

    DO_SIMPLE_TEST(ASMAtomicReadU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicUoReadU64, uint64_t);
}
999
1000
/** Writes a_Val to *a_pVar using a_Function and verifies the stored value
 *  with CHECKVAL, reporting mismatches using a_Fmt.  (a_Type is unused in
 *  the expansion; it is kept for symmetry with TEST_READ.) */
#define TEST_WRITE(a_pVar, a_Type, a_Fmt, a_Function, a_Val) \
    do { a_Function(a_pVar, a_Val); CHECKVAL(*a_pVar, a_Val, a_Fmt); } while (0)
1003
/**
 * Exercises ASMAtomicWriteU8/S8/Bool against a single byte.
 *
 * @param   pu8     The byte to write to; aliased as int8_t and bool to test
 *                  the signed and boolean write variants on the same storage.
 */
DECLINLINE(void) tstASMAtomicWriteU8Worker(uint8_t volatile *pu8)
{
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 0);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 1);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 2);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 16);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 32);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 32);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 127);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 128);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 169);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 239);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 254);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicWriteU8, 255);

    volatile int8_t *pi8 = (volatile int8_t *)pu8;
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicWriteS8, INT8_MIN);
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicWriteS8, INT8_MAX);
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicWriteS8, 42);
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicWriteS8, -41);

    volatile bool *pf = (volatile bool *)pu8;
    TEST_WRITE(pf, bool, "%d", ASMAtomicWriteBool, true);
    TEST_WRITE(pf, bool, "%d", ASMAtomicWriteBool, false);
}
1029
1030
/**
 * Exercises the unordered write variants ASMAtomicUoWriteU8/S8/Bool
 * (same value set as the ordered worker above).
 *
 * @param   pu8     The byte to write to; aliased as int8_t and bool.
 */
DECLINLINE(void) tstASMAtomicUoWriteU8Worker(uint8_t volatile *pu8)
{
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 0);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 1);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 2);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 16);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 32);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 32);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 127);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 128);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 169);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 239);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 254);
    TEST_WRITE(pu8, uint8_t, "%#x", ASMAtomicUoWriteU8, 255);

    volatile int8_t *pi8 = (volatile int8_t *)pu8;
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicUoWriteS8, INT8_MIN);
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicUoWriteS8, INT8_MAX);
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicUoWriteS8, 42);
    TEST_WRITE(pi8, int8_t, "%d", ASMAtomicUoWriteS8, -41);

    volatile bool *pf = (volatile bool *)pu8;
    TEST_WRITE(pf, bool, "%d", ASMAtomicUoWriteBool, true);
    TEST_WRITE(pf, bool, "%d", ASMAtomicUoWriteBool, false);
}
1056
1057
/**
 * Exercises ASMAtomicWriteU16/S16 against a single 16-bit variable.
 *
 * @param   pu16    The variable to write to; aliased as int16_t for the
 *                  signed variant.
 */
DECLINLINE(void) tstASMAtomicWriteU16Worker(uint16_t volatile *pu16)
{
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicWriteU16, 0);
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicWriteU16, 19983);
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicWriteU16, INT16_MAX);
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicWriteU16, UINT16_MAX);

    volatile int16_t *pi16 = (volatile int16_t *)pu16;
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicWriteS16, INT16_MIN);
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicWriteS16, INT16_MAX);
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicWriteS16, 42);
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicWriteS16, -41);
}
1071
1072
/**
 * Exercises the unordered write variants ASMAtomicUoWriteU16/S16
 * (same value set as the ordered worker above).
 *
 * @param   pu16    The variable to write to.
 */
DECLINLINE(void) tstASMAtomicUoWriteU16Worker(uint16_t volatile *pu16)
{
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicUoWriteU16, 0);
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicUoWriteU16, 19983);
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicUoWriteU16, INT16_MAX);
    TEST_WRITE(pu16, uint16_t, "%#x", ASMAtomicUoWriteU16, UINT16_MAX);

    volatile int16_t *pi16 = (volatile int16_t *)pu16;
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicUoWriteS16, INT16_MIN);
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicUoWriteS16, INT16_MAX);
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicUoWriteS16, 42);
    TEST_WRITE(pi16, int16_t, "%d", ASMAtomicUoWriteS16, -41);
}
1086
1087
/**
 * Exercises ASMAtomicWriteU32/S32 and, on 32-bit hosts, the size_t, pointer
 * and handle write variants.
 *
 * @param   pu32    The variable to write to; aliased as int32_t, size_t,
 *                  void pointer, and RTSEMEVENT handle on 32-bit hosts.
 */
DECLINLINE(void) tstASMAtomicWriteU32Worker(uint32_t volatile *pu32)
{
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, 0);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, 19983);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, INT16_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, UINT16_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, _1M-1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, _1M+1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, _1G-1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, _1G+1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, INT32_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicWriteU32, UINT32_MAX);

    volatile int32_t *pi32 = (volatile int32_t *)pu32;
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicWriteS32, INT32_MIN);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicWriteS32, INT32_MAX);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicWriteS32, 42);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicWriteS32, -41);

#if ARCH_BITS == 32
    size_t volatile *pcb = (size_t volatile *)pu32;
    TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicWriteZ, ~(size_t)42);
    TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicWriteZ, 42);

    void * volatile *ppv = (void * volatile *)pu32;
    TEST_WRITE(ppv, void *, "%#zx", ASMAtomicWritePtrVoid, NULL);
    TEST_WRITE(ppv, void *, "%#zx", ASMAtomicWritePtrVoid, (void *)~(uintptr_t)12938754);

    /* The typed pointer/handle writers are checked manually since they are
       not plain (pointer, value) calls suitable for TEST_WRITE. */
    ASMAtomicWriteNullPtr(ppv); CHECKVAL(*ppv, NULL, "%p");
    ASMAtomicWritePtr(ppv, (void *)~(intptr_t)2322434); CHECKVAL(*ppv, (void *)~(intptr_t)2322434, "%p");

    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
    ASMAtomicWriteHandle(phEvt, (RTSEMEVENT)(uintptr_t)99753456); CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)99753456, "%p");
#endif
}
1123
1124
/**
 * Exercises the unordered write variants ASMAtomicUoWriteU32/S32 and, on
 * 32-bit hosts, the size_t, pointer and handle variants.
 *
 * @param   pu32    The variable to write to.
 */
DECLINLINE(void) tstASMAtomicUoWriteU32Worker(uint32_t volatile *pu32)
{
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, 0);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, 19983);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, INT16_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, UINT16_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, _1M-1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, _1M+1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, _1G-1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, _1G+1);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, INT32_MAX);
    TEST_WRITE(pu32, uint32_t, "%#x", ASMAtomicUoWriteU32, UINT32_MAX);

    volatile int32_t *pi32 = (volatile int32_t *)pu32;
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicUoWriteS32, INT32_MIN);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicUoWriteS32, INT32_MAX);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicUoWriteS32, 42);
    TEST_WRITE(pi32, int32_t, "%d", ASMAtomicUoWriteS32, -41);

#if ARCH_BITS == 32
    size_t volatile *pcb = (size_t volatile *)pu32;
    TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicUoWriteZ, ~(size_t)42);
    TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicUoWriteZ, 42);

    void * volatile *ppv = (void * volatile *)pu32;
    TEST_WRITE(ppv, void *, "%#zx", ASMAtomicUoWritePtrVoid, NULL);
    TEST_WRITE(ppv, void *, "%#zx", ASMAtomicUoWritePtrVoid, (void *)~(uintptr_t)12938754);

    /* The typed pointer/handle writers are checked manually since they are
       not plain (pointer, value) calls suitable for TEST_WRITE. */
    ASMAtomicUoWriteNullPtr(ppv); CHECKVAL(*ppv, NULL, "%p");
    ASMAtomicUoWritePtr(ppv, (void *)~(intptr_t)2322434); CHECKVAL(*ppv, (void *)~(intptr_t)2322434, "%p");

    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
    ASMAtomicUoWriteHandle(phEvt, (RTSEMEVENT)(uintptr_t)99753456); CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)99753456, "%p");
#endif
}
1160
1161
1162DECLINLINE(void) tstASMAtomicWriteU64Worker(uint64_t volatile *pu64)
1163{
1164 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, 0);
1165 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, 19983);
1166 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, INT16_MAX);
1167 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, UINT16_MAX);
1168 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, _1M-1);
1169 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, _1M+1);
1170 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, _1G-1);
1171 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, _1G+1);
1172 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, INT32_MAX);
1173 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, UINT32_MAX);
1174 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, INT64_MAX);
1175 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, UINT64_MAX);
1176 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicWriteU64, UINT64_C(0x450872549687134));
1177
1178 volatile int64_t *pi64 = (volatile int64_t *)pu64;
1179 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicWriteS64, INT64_MIN);
1180 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicWriteS64, INT64_MAX);
1181 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicWriteS64, 42);
1182
1183#if ARCH_BITS == 64
1184 size_t volatile *pcb = (size_t volatile *)pu64;
1185 TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicWriteZ, ~(size_t)42);
1186 TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicWriteZ, 42);
1187
1188 void * volatile *ppv = (void * volatile *)pu64;
1189 TEST_WRITE(ppv, void *, "%#zx", ASMAtomicWritePtrVoid, NULL);
1190 TEST_WRITE(ppv, void *, "%#zx", ASMAtomicWritePtrVoid, (void *)~(uintptr_t)12938754);
1191
1192 ASMAtomicWriteNullPtr(ppv); CHECKVAL(*ppv, NULL, "%p");
1193 ASMAtomicWritePtr(ppv, (void *)~(intptr_t)2322434); CHECKVAL(*ppv, (void *)~(intptr_t)2322434, "%p");
1194
1195 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
1196 ASMAtomicWriteHandle(phEvt, (RTSEMEVENT)(uintptr_t)99753456); CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)99753456, "%p");
1197#endif
1198}
1199
1200
1201DECLINLINE(void) tstASMAtomicUoWriteU64Worker(uint64_t volatile *pu64)
1202{
1203 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, 0);
1204 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, 19983);
1205 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, INT16_MAX);
1206 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, UINT16_MAX);
1207 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, _1M-1);
1208 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, _1M+1);
1209 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, _1G-1);
1210 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, _1G+1);
1211 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, INT32_MAX);
1212 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, UINT32_MAX);
1213 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, INT64_MAX);
1214 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, UINT64_MAX);
1215 TEST_WRITE(pu64, uint64_t, "%#llx", ASMAtomicUoWriteU64, UINT64_C(0x450872549687134));
1216
1217 volatile int64_t *pi64 = (volatile int64_t *)pu64;
1218 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicUoWriteS64, INT64_MIN);
1219 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicUoWriteS64, INT64_MAX);
1220 TEST_WRITE(pi64, int64_t, "%d", ASMAtomicUoWriteS64, 42);
1221
1222#if ARCH_BITS == 64
1223 size_t volatile *pcb = (size_t volatile *)pu64;
1224 TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicUoWriteZ, ~(size_t)42);
1225 TEST_WRITE(pcb, size_t, "%#zx", ASMAtomicUoWriteZ, 42);
1226
1227 void * volatile *ppv = (void * volatile *)pu64;
1228 TEST_WRITE(ppv, void *, "%#zx", ASMAtomicUoWritePtrVoid, NULL);
1229 TEST_WRITE(ppv, void *, "%#zx", ASMAtomicUoWritePtrVoid, (void *)~(uintptr_t)12938754);
1230
1231 ASMAtomicUoWriteNullPtr(ppv); CHECKVAL(*ppv, NULL, "%p");
1232 ASMAtomicUoWritePtr(ppv, (void *)~(intptr_t)2322434); CHECKVAL(*ppv, (void *)~(intptr_t)2322434, "%p");
1233
1234 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
1235 ASMAtomicUoWriteHandle(phEvt, (RTSEMEVENT)(uintptr_t)99753456); CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)99753456, "%p");
1236#endif
1237}
1238
/**
 * Driver for the atomic write tests.  DO_SIMPLE_TEST (defined earlier in the
 * file) presumably dispatches each name to the corresponding
 * tstASM<name>Worker function above — confirm against the macro definition.
 */
static void tstASMAtomicWrite(void)
{
    DO_SIMPLE_TEST(ASMAtomicWriteU8, uint8_t);
    DO_SIMPLE_TEST(ASMAtomicUoWriteU8, uint8_t);

    DO_SIMPLE_TEST(ASMAtomicWriteU16, uint16_t);
    DO_SIMPLE_TEST(ASMAtomicUoWriteU16, uint16_t);

    DO_SIMPLE_TEST(ASMAtomicWriteU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicUoWriteU32, uint32_t);

    DO_SIMPLE_TEST(ASMAtomicWriteU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicUoWriteU64, uint64_t);
}
1253
1254
/**
 * Exercises the 8-bit exchange operations (ASMAtomicXchgU8/S8/Bool).
 *
 * Each CHECK_OP_AND_VAL step verifies both the returned (old) value and the
 * new variable content; the expected old value is whatever the previous step
 * stored, so the statement order is significant.
 *
 * @param   pu8     The byte to exchange against; aliased as int8_t and bool.
 */
DECLINLINE(void) tstASMAtomicXchgU8Worker(uint8_t volatile *pu8)
{
    *pu8 = 0;
    CHECK_OP_AND_VAL(uint8_t, "%#x", pu8, ASMAtomicXchgU8(pu8, 1), 0, 1);
    CHECK_OP_AND_VAL(uint8_t, "%#x", pu8, ASMAtomicXchgU8(pu8, UINT8_C(0xff)), 1, UINT8_C(0xff));
    CHECK_OP_AND_VAL(uint8_t, "%#x", pu8, ASMAtomicXchgU8(pu8, UINT8_C(0x87)), UINT8_C(0xff), UINT8_C(0x87));
    CHECK_OP_AND_VAL(uint8_t, "%#x", pu8, ASMAtomicXchgU8(pu8, UINT8_C(0xfe)), UINT8_C(0x87), UINT8_C(0xfe));

    /* 0xfe left by the last unsigned step reads as -2 when viewed as int8_t. */
    int8_t volatile *pi8 = (int8_t volatile *)pu8;
    CHECK_OP_AND_VAL(int8_t, "%d", pi8, ASMAtomicXchgS8(pi8, INT8_C(-4)), INT8_C(-2), INT8_C(-4));
    CHECK_OP_AND_VAL(int8_t, "%d", pi8, ASMAtomicXchgS8(pi8, INT8_C(4)), INT8_C(-4), INT8_C(4));
    CHECK_OP_AND_VAL(int8_t, "%d", pi8, ASMAtomicXchgS8(pi8, INT8_MAX), INT8_C(4), INT8_MAX);
    CHECK_OP_AND_VAL(int8_t, "%d", pi8, ASMAtomicXchgS8(pi8, INT8_MIN), INT8_MAX, INT8_MIN);
    CHECK_OP_AND_VAL(int8_t, "%d", pi8, ASMAtomicXchgS8(pi8, 1), INT8_MIN, 1);

    /* 1 left above reads as true when viewed as bool. */
    bool volatile *pf = (bool volatile *)pu8;
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicXchgBool(pf, false), true, false);
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicXchgBool(pf, false), false, false);
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicXchgBool(pf, true), false, true);
}
1275
1276
/**
 * Exercises the 16-bit exchange operations (ASMAtomicXchgU16/S16).
 *
 * Each CHECK_OP_AND_VAL step verifies both the returned (old) value and the
 * new variable content; statement order is significant.
 *
 * @param   pu16    The variable to exchange against; aliased as int16_t.
 */
DECLINLINE(void) tstASMAtomicXchgU16Worker(uint16_t volatile *pu16)
{
    *pu16 = 0;
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, 1), 0, 1);
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, 0), 1, 0);
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, UINT16_MAX), 0, UINT16_MAX);
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, UINT16_C(0x7fff)), UINT16_MAX, UINT16_C(0x7fff));
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, UINT16_C(0x8765)), UINT16_C(0x7fff), UINT16_C(0x8765));
    CHECK_OP_AND_VAL(uint16_t, "%#x", pu16, ASMAtomicXchgU16(pu16, UINT16_C(0xfffe)), UINT16_C(0x8765), UINT16_C(0xfffe));

    /* 0xfffe left above reads as -2 when viewed as int16_t. */
    int16_t volatile *pi16 = (int16_t volatile *)pu16;
    CHECK_OP_AND_VAL(int16_t, "%d", pi16, ASMAtomicXchgS16(pi16, INT16_MIN), INT16_C(-2), INT16_MIN);
    CHECK_OP_AND_VAL(int16_t, "%d", pi16, ASMAtomicXchgS16(pi16, INT16_MAX), INT16_MIN, INT16_MAX);
    CHECK_OP_AND_VAL(int16_t, "%d", pi16, ASMAtomicXchgS16(pi16, -8), INT16_MAX, -8);
    CHECK_OP_AND_VAL(int16_t, "%d", pi16, ASMAtomicXchgS16(pi16, 8), -8, 8);
}
1293
1294
/**
 * Exercises the 32-bit exchange operations (ASMAtomicXchgU32/S32 and, on
 * 32-bit hosts, the size_t and ring-0 pointer variants).
 *
 * Each CHECK_OP_AND_VAL step verifies both the returned (old) value and the
 * new variable content; statement order is significant.
 *
 * @param   pu32    The variable to exchange against.
 */
DECLINLINE(void) tstASMAtomicXchgU32Worker(uint32_t volatile *pu32)
{
    *pu32 = 0;
    CHECK_OP_AND_VAL(uint32_t, "%#x", pu32, ASMAtomicXchgU32(pu32, 1), 0, 1);
    CHECK_OP_AND_VAL(uint32_t, "%#x", pu32, ASMAtomicXchgU32(pu32, 0), 1, 0);
    CHECK_OP_AND_VAL(uint32_t, "%#x", pu32, ASMAtomicXchgU32(pu32, UINT32_MAX), 0, UINT32_MAX);
    CHECK_OP_AND_VAL(uint32_t, "%#x", pu32, ASMAtomicXchgU32(pu32, UINT32_C(0x87654321)), UINT32_MAX, UINT32_C(0x87654321));
    CHECK_OP_AND_VAL(uint32_t, "%#x", pu32, ASMAtomicXchgU32(pu32, UINT32_C(0xfffffffe)), UINT32_C(0x87654321), UINT32_C(0xfffffffe));

    /* 0xfffffffe left above reads as -2 when viewed as int32_t. */
    int32_t volatile *pi32 = (int32_t volatile *)pu32;
    CHECK_OP_AND_VAL(int32_t, "%d", pi32, ASMAtomicXchgS32(pi32, INT32_MIN), INT32_C(-2), INT32_MIN);
    CHECK_OP_AND_VAL(int32_t, "%d", pi32, ASMAtomicXchgS32(pi32, INT32_MAX), INT32_MIN, INT32_MAX);
    CHECK_OP_AND_VAL(int32_t, "%d", pi32, ASMAtomicXchgS32(pi32, -16), INT32_MAX, -16);
    CHECK_OP_AND_VAL(int32_t, "%d", pi32, ASMAtomicXchgS32(pi32, 16), -16, 16);

#if ARCH_BITS == 32
    /* 16 == 0x10 left above is the expected old value here. */
    size_t volatile *pcb = (size_t volatile *)pu32;
    CHECK_OP_AND_VAL(size_t, "%#zx", pcb, ASMAtomicXchgZ(pcb, UINT32_C(0x9481239b)), 0x10, UINT32_C(0x9481239b));
    CHECK_OP_AND_VAL(size_t, "%#zx", pcb, ASMAtomicXchgZ(pcb, UINT32_C(0xcdef1234)), UINT32_C(0x9481239b), UINT32_C(0xcdef1234));
#endif

#if R0_ARCH_BITS == 32
    RTR0PTR volatile *pR0Ptr = (RTR0PTR volatile *)pu32;
    /* NOTE(review): this checks 'pcb', which is only declared when
       ARCH_BITS == 32; if R0_ARCH_BITS == 32 while ARCH_BITS != 32 this will
       not compile, and the expected old value also depends on the ARCH_BITS
       block above.  'pR0Ptr' looks like the intended variable — confirm. */
    CHECK_OP_AND_VAL(size_t, "%#llx", pcb, ASMAtomicXchgR0Ptr(pR0Ptr, UINT32_C(0x80341237)), UINT32_C(0xcdef1234), UINT32_C(0x80341237));
#endif
}
1321
1322
/**
 * Exercises the 64-bit exchange operations (ASMAtomicXchgU64/S64 and, on
 * 64-bit hosts, the size_t and ring-0 pointer variants).
 *
 * Each CHECK_OP_AND_VAL step verifies both the returned (old) value and the
 * new variable content; statement order is significant.
 *
 * @param   pu64    The variable to exchange against.
 */
DECLINLINE(void) tstASMAtomicXchgU64Worker(uint64_t volatile *pu64)
{
    *pu64 = 0;
    CHECK_OP_AND_VAL(uint64_t, "%#llx", pu64, ASMAtomicXchgU64(pu64, 1), 0, 1);
    CHECK_OP_AND_VAL(uint64_t, "%#llx", pu64, ASMAtomicXchgU64(pu64, 0), 1, 0);
    CHECK_OP_AND_VAL(uint64_t, "%#llx", pu64, ASMAtomicXchgU64(pu64, UINT64_MAX), 0, UINT64_MAX);
    CHECK_OP_AND_VAL(uint64_t, "%#llx", pu64, ASMAtomicXchgU64(pu64, UINT64_C(0xfedcba0987654321)), UINT64_MAX, UINT64_C(0xfedcba0987654321));
    CHECK_OP_AND_VAL(uint64_t, "%#llx", pu64, ASMAtomicXchgU64(pu64, UINT64_C(0xfffffffffffffffe)), UINT64_C(0xfedcba0987654321), UINT64_C(0xfffffffffffffffe));

    /* 0xfffffffffffffffe left above reads as -2 when viewed as int64_t. */
    int64_t volatile *pi64 = (int64_t volatile *)pu64;
    CHECK_OP_AND_VAL(int64_t, "%lld", pi64, ASMAtomicXchgS64(pi64, INT64_MAX), -2, INT64_MAX);
    CHECK_OP_AND_VAL(int64_t, "%lld", pi64, ASMAtomicXchgS64(pi64, INT64_MIN), INT64_MAX, INT64_MIN);
    CHECK_OP_AND_VAL(int64_t, "%lld", pi64, ASMAtomicXchgS64(pi64, -32), INT64_MIN, -32);
    CHECK_OP_AND_VAL(int64_t, "%lld", pi64, ASMAtomicXchgS64(pi64, 32), -32, 32);

#if ARCH_BITS == 64
    /* 32 == 0x20 left above is the expected old value here. */
    size_t volatile *pcb = (size_t volatile *)pu64;
    CHECK_OP_AND_VAL(size_t, "%#zx", pcb, ASMAtomicXchgZ(pcb, UINT64_C(0x94812396759)), 0x20, UINT64_C(0x94812396759));
    CHECK_OP_AND_VAL(size_t, "%#zx", pcb, ASMAtomicXchgZ(pcb, UINT64_C(0xcdef1234abdf7896)), UINT64_C(0x94812396759), UINT64_C(0xcdef1234abdf7896));
#endif

#if R0_ARCH_BITS == 64
    RTR0PTR volatile *pR0Ptr = (RTR0PTR volatile *)pu64;
    /* NOTE(review): this checks 'pcb', which is only declared when
       ARCH_BITS == 64; if R0_ARCH_BITS == 64 while ARCH_BITS != 64 this will
       not compile, and the expected old value also depends on the ARCH_BITS
       block above.  'pR0Ptr' looks like the intended variable — confirm. */
    CHECK_OP_AND_VAL(size_t, "%#llx", pcb, ASMAtomicXchgR0Ptr(pR0Ptr, UINT64_C(0xfedc1234567890ab)), UINT64_C(0xcdef1234abdf7896), UINT64_C(0xfedc1234567890ab));
#endif
}
1349
1350
/**
 * Exercises the pointer exchange operations (ASMAtomicXchgPtr/R3Ptr and
 * ASMAtomicXchgHandle).
 *
 * @param   ppv     The pointer variable to exchange against; also aliased as
 *                  an RTSEMEVENT handle for the handle variant.
 */
DECLINLINE(void) tstASMAtomicXchgPtrWorker(void * volatile *ppv)
{
    *ppv = NULL;
    CHECK_OP_AND_VAL(void *, "%p", ppv, ASMAtomicXchgPtr(ppv, (void *)(~(uintptr_t)0)), NULL, (void *)(~(uintptr_t)0));
    CHECK_OP_AND_VAL(void *, "%p", ppv, ASMAtomicXchgPtr(ppv, (void *)(~(uintptr_t)0x87654321)), (void *)(~(uintptr_t)0), (void *)(~(uintptr_t)0x87654321));
    CHECK_OP_AND_VAL(void *, "%p", ppv, ASMAtomicXchgPtr(ppv, NULL), (void *)(~(uintptr_t)0x87654321), NULL);

    CHECK_OP_AND_VAL(void *, "%p", ppv, ASMAtomicXchgR3Ptr(ppv, (void *)ppv), NULL, (void *)ppv);

    /* The handle variant returns the old value via an output parameter,
       so it is checked manually: the old value is ppv (stored just above). */
    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)ppv;
    RTSEMEVENT hRet;
    ASMAtomicXchgHandle(phEvt, (RTSEMEVENT)(~(uintptr_t)12345), &hRet);
    CHECKVAL(hRet, (RTSEMEVENT)ppv, "%p");
    CHECKVAL(*phEvt, (RTSEMEVENT)(~(uintptr_t)12345), "%p");
}
1366
1367
/**
 * Driver for the atomic exchange tests.  DO_SIMPLE_TEST (defined earlier in
 * the file) presumably dispatches each name to the corresponding
 * tstASM<name>Worker function above — confirm against the macro definition.
 */
static void tstASMAtomicXchg(void)
{
    DO_SIMPLE_TEST(ASMAtomicXchgU8, uint8_t);
    DO_SIMPLE_TEST(ASMAtomicXchgU16, uint16_t);
    DO_SIMPLE_TEST(ASMAtomicXchgU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicXchgU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicXchgPtr, void *);
}
1376
1377
/**
 * Exercises the 8-bit compare-and-exchange operations
 * (ASMAtomicCmpXchgU8/S8/Bool).
 *
 * Each step verifies the returned success/failure flag and the resulting
 * variable content; the expected outcome depends on the value left by the
 * previous step, so statement order is significant.
 *
 * @param   pu8     The byte to operate on; aliased as int8_t and bool.
 */
DECLINLINE(void) tstASMAtomicCmpXchgU8Worker(uint8_t volatile *pu8)
{
    *pu8 = 0xff;
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu8, ASMAtomicCmpXchgU8(pu8, 0, 0), false, 0xff);      /* wrong expected old -> no change */
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu8, ASMAtomicCmpXchgU8(pu8, 0, 0xff), true, 0);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu8, ASMAtomicCmpXchgU8(pu8, 0x97, 0), true, 0x97);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu8, ASMAtomicCmpXchgU8(pu8, 0x97, 0), false, 0x97);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu8, ASMAtomicCmpXchgU8(pu8, 0x7f, 0x97), true, 0x7f);

    int8_t volatile *pi8 = (int8_t volatile *)pu8;
    CHECK_OP_AND_VAL(bool, "%d", pi8, ASMAtomicCmpXchgS8(pi8, -2, 0x7f), true, -2);
    CHECK_OP_AND_VAL(bool, "%d", pi8, ASMAtomicCmpXchgS8(pi8, INT8_MAX, -2), true, INT8_MAX);
    CHECK_OP_AND_VAL(bool, "%d", pi8, ASMAtomicCmpXchgS8(pi8, INT8_MAX, INT8_MIN), false, INT8_MAX);
    CHECK_OP_AND_VAL(bool, "%d", pi8, ASMAtomicCmpXchgS8(pi8, INT8_MIN, INT8_MAX), true, INT8_MIN);
    CHECK_OP_AND_VAL(bool, "%d", pi8, ASMAtomicCmpXchgS8(pi8, 1, INT8_MIN), true, 1);

    /* 1 left above reads as true when viewed as bool. */
    bool volatile *pf = (bool volatile *)pu8;
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicCmpXchgBool(pf, true, true), true, true);
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicCmpXchgBool(pf, false, true), true, false);
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicCmpXchgBool(pf, false, true), false, false);
    CHECK_OP_AND_VAL(bool, "%d", pf, ASMAtomicCmpXchgBool(pf, false, false), true, false);
}
1400
1401
/**
 * Exercises the 32-bit compare-and-exchange operations
 * (ASMAtomicCmpXchgU32/S32 and, on 32-bit hosts, the pointer and handle
 * variants).
 *
 * Each step verifies the returned success/failure flag and the resulting
 * variable content; statement order is significant.
 *
 * @param   pu32    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicCmpXchgU32Worker(uint32_t volatile *pu32)
{
    *pu32 = UINT32_C(0xffffffff);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu32, ASMAtomicCmpXchgU32(pu32, 0, 0), false, UINT32_C(0xffffffff));
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu32, ASMAtomicCmpXchgU32(pu32, 0, UINT32_C(0xffffffff)), true, 0);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu32, ASMAtomicCmpXchgU32(pu32, UINT32_C(0x80088efd), UINT32_C(0x12345678)), false, 0);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu32, ASMAtomicCmpXchgU32(pu32, UINT32_C(0x80088efd), 0), true, UINT32_C(0x80088efd));
    CHECK_OP_AND_VAL_EX(bool, "%d", "%#x", pu32, ASMAtomicCmpXchgU32(pu32, UINT32_C(0xfffffffe), UINT32_C(0x80088efd)), true, UINT32_C(0xfffffffe));

    /* 0xfffffffe left above reads as -2 when viewed as int32_t. */
    int32_t volatile *pi32 = (int32_t volatile *)pu32;
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, INT32_MIN, 2), false, -2);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, INT32_MIN, -2), true, INT32_MIN);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, -19, -2), false, INT32_MIN);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, -19, INT32_MIN), true, -19);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, -19, INT32_MIN), false, -19);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, 19, -19), true, 19);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, INT32_MAX, -234), false, 19);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%d", pi32, ASMAtomicCmpXchgS32(pi32, INT32_MAX, 19), true, INT32_MAX);

#if ARCH_BITS == 32
    /* NOTE(review): the last step above left INT32_MAX in the variable, yet
       these first pointer checks expect (void *)29 — verify the expected
       values in this 32-bit-only path. */
    void * volatile *ppv = (void * volatile *)pu32;
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)-29), false, (void *)(intptr_t)29);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)29), true, NULL);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)29), false, NULL);
    CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, (void *)~(uintptr_t)42, NULL), true, (void *)~(uintptr_t)42);

    /* The handle variant reports success via an output flag, so it is
       checked manually. */
    bool fRc;
    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
    ASMAtomicCmpXchgHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, (RTSEMEVENT)NULL, fRc);
    CHECKVAL(fRc, false, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");

    ASMAtomicCmpXchgHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, (RTSEMEVENT)~(uintptr_t)42, fRc);
    CHECKVAL(fRc, true, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, "%p");
#endif
}
1439
1440
1441DECLINLINE(void) tstASMAtomicCmpXchgU64Worker(uint64_t volatile *pu64)
1442{
1443 *pu64 = UINT64_C(0xffffffffffffff);
1444 CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, 0, 0), false, UINT64_C(0xffffffffffffff));
1445 CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, 0, UINT64_C(0xffffffffffffff)), true, 0);
1446 CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), 1), false, 0);
1447 CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), 0), true, UINT64_C(0x80040008008efd));
1448 CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, UINT64_C(0x80040008008efd), 0), false, UINT64_C(0x80040008008efd));
1449 CHECK_OP_AND_VAL_EX(bool, "%d", "%#llx", pu64, ASMAtomicCmpXchgU64(pu64, UINT64_C(0xfffffffffffffffd), UINT64_C(0x80040008008efd)), true, UINT64_C(0xfffffffffffffffd));
1450
1451 int64_t volatile *pi64 = (int64_t volatile *)pu64;
1452 CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, INT64_MAX, 0), false, -3);
1453 CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, INT64_MAX, -3), true, INT64_MAX);
1454 CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, INT64_MIN, INT64_MIN), false, INT64_MAX);
1455 CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, INT64_MIN, INT64_MAX), true, INT64_MIN);
1456 CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, -29, -29), false, INT64_MIN);
1457 CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, -29, INT64_MIN), true, -29);
1458 CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, -29, INT64_MIN), false, -29);
1459 CHECK_OP_AND_VAL_EX(bool, "%d", "%#lld", pi64, ASMAtomicCmpXchgS64(pi64, 29, -29), true, 29);
1460
1461#if ARCH_BITS == 64
1462 void * volatile *ppv = (void * volatile *)pu64;
1463 CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)-29), false, (void *)(intptr_t)29);
1464 CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)29), true, NULL);
1465 CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, NULL, (void *)(intptr_t)29), false, NULL);
1466 CHECK_OP_AND_VAL_EX(bool, "%d", "%p", ppv, ASMAtomicCmpXchgPtrVoid(ppv, (void *)~(uintptr_t)42, NULL), true, (void *)~(uintptr_t)42);
1467
1468 bool fRc;
1469 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
1470 ASMAtomicCmpXchgHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, (RTSEMEVENT)NULL, fRc);
1471 CHECKVAL(fRc, false, "%d");
1472 CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)42, "%p");
1473
1474 ASMAtomicCmpXchgHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, (RTSEMEVENT)~(uintptr_t)42, fRc);
1475 CHECKVAL(fRc, true, "%d");
1476 CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)0x12356389, "%p");
1477#endif
1478}
1479
1480
/**
 * Dispatches the compare-and-exchange worker tests for each width
 * via the DO_SIMPLE_TEST harness.
 */
static void tstASMAtomicCmpXchg(void)
{
    DO_SIMPLE_TEST(ASMAtomicCmpXchgU8, uint8_t);
    DO_SIMPLE_TEST(ASMAtomicCmpXchgU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicCmpXchgU64, uint64_t);
}
1487
1488
/**
 * Exercises the 32-bit compare-and-exchange-with-old-value APIs.
 *
 * CHECK_OP_AND_VAL_EX2 verifies three things per line: the boolean result,
 * the resulting variable value, and the old value written back through the
 * last parameter.  Each expected value depends on the preceding line.
 *
 * @param   pu32    The variable to operate on; initial content is overwritten.
 */
DECLINLINE(void) tstASMAtomicCmpXchgExU32Worker(uint32_t volatile *pu32)
{
    *pu32 = UINT32_C(0xffffffff);
    uint32_t u32Old = UINT32_C(0x80005111);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, 0, 0, &u32Old), false, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, 0, UINT32_C(0xffffffff), &u32Old), true, 0, UINT32_C(0xffffffff));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, 0, UINT32_C(0xffffffff), &u32Old), false, 0, UINT32_C(0x00000000));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, UINT32_C(0x80088efd), 0, &u32Old), true, UINT32_C(0x80088efd), 0);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, UINT32_C(0x80088efd), 0, &u32Old), false, UINT32_C(0x80088efd), UINT32_C(0x80088efd));
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%#x", pu32, u32Old, ASMAtomicCmpXchgExU32(pu32, UINT32_C(0xffffffe0), UINT32_C(0x80088efd), &u32Old), true, UINT32_C(0xffffffe0), UINT32_C(0x80088efd));

    /* Signed variant aliasing the same storage; 0xffffffe0 == -32. */
    int32_t volatile *pi32 = (int32_t volatile *)pu32;
    int32_t i32Old = 0;
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, 32, 32, &i32Old), false, -32, -32);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, 32, -32, &i32Old), true, 32, -32);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, INT32_MIN, 32, &i32Old), true, INT32_MIN, 32);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, INT32_MIN, 32, &i32Old), false, INT32_MIN, INT32_MIN);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, INT32_MAX, INT32_MAX, &i32Old), false, INT32_MIN, INT32_MIN);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, INT32_MAX, INT32_MIN, &i32Old), true, INT32_MAX, INT32_MIN);
    CHECK_OP_AND_VAL_EX2(bool, "%d", "%d", pi32, i32Old, ASMAtomicCmpXchgExS32(pi32, 42, INT32_MAX, &i32Old), true, 42, INT32_MAX);

#if ARCH_BITS == 32
    /* Handle variant - only fits in 32 bits on 32-bit hosts.  The variable
       holds 42 at this point (see the last line above). */
    RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu32;
    RTSEMEVENT hEvtOld = (RTSEMEVENT)~(uintptr_t)31;
    bool fRc = true;
    ASMAtomicCmpXchgExHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, (RTSEMEVENT)~(uintptr_t)0, fRc, &hEvtOld);
    CHECKVAL(fRc, false, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)42, "%p");
    CHECKVAL(hEvtOld, (RTSEMEVENT)(uintptr_t)42, "%p");

    ASMAtomicCmpXchgExHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, (RTSEMEVENT)(uintptr_t)42, fRc, &hEvtOld);
    CHECKVAL(fRc, true, "%d");
    CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, "%p");
    CHECKVAL(hEvtOld, (RTSEMEVENT)(uintptr_t)42, "%p");
#endif
}
1525
1526
1527DECLINLINE(void) tstASMAtomicCmpXchgExU64Worker(uint64_t volatile *pu64)
1528{
1529 *pu64 = UINT64_C(0xffffffffffffffff);
1530 uint64_t u64Old = UINT64_C(0x8000000051111111);
1531 CHECK_OP_AND_VAL_EX2(bool, "%d", "%#llx", pu64, u64Old, ASMAtomicCmpXchgExU64(pu64, 0, 0, &u64Old), false, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
1532 CHECK_OP_AND_VAL_EX2(bool, "%d", "%#llx", pu64, u64Old, ASMAtomicCmpXchgExU64(pu64, 0, UINT64_C(0xffffffffffffffff), &u64Old), true, 0, UINT64_C(0xffffffffffffffff));
1533 CHECK_OP_AND_VAL_EX2(bool, "%d", "%#llx", pu64, u64Old, ASMAtomicCmpXchgExU64(pu64, UINT64_C(0x0080040008008efd), 0x342, &u64Old), false, 0, 0);
1534 CHECK_OP_AND_VAL_EX2(bool, "%d", "%#llx", pu64, u64Old, ASMAtomicCmpXchgExU64(pu64, UINT64_C(0x0080040008008efd), 0, &u64Old), true, UINT64_C(0x0080040008008efd), 0);
1535 CHECK_OP_AND_VAL_EX2(bool, "%d", "%#llx", pu64, u64Old, ASMAtomicCmpXchgExU64(pu64, UINT64_C(0xffffffffffffffc0), UINT64_C(0x0080040008008efd), &u64Old), true, UINT64_C(0xffffffffffffffc0), UINT64_C(0x0080040008008efd));
1536
1537 int64_t volatile *pi64 = (int64_t volatile *)pu64;
1538 int64_t i64Old = -3;
1539 CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, 64, 64, &i64Old), false, -64, -64);
1540 CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, 64, -64, &i64Old), true, 64, -64);
1541 CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, 64, -64, &i64Old), false, 64, 64);
1542 CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, INT64_MIN, -64, &i64Old), false, 64, 64);
1543 CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, INT64_MIN, 64, &i64Old), true, INT64_MIN, 64);
1544 CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, INT64_MAX, INT64_MIN, &i64Old), true, INT64_MAX, INT64_MIN);
1545 CHECK_OP_AND_VAL_EX2(bool, "%d", "%#lld", pi64, i64Old, ASMAtomicCmpXchgExS64(pi64, 42, INT64_MAX, &i64Old), true, 42, INT64_MAX);
1546
1547#if ARCH_BITS == 64
1548 RTSEMEVENT volatile *phEvt = (RTSEMEVENT volatile *)pu64;
1549 RTSEMEVENT hEvtOld = (RTSEMEVENT)~(uintptr_t)31;
1550 bool fRc = true;
1551 ASMAtomicCmpXchgExHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, (RTSEMEVENT)~(uintptr_t)0, fRc, &hEvtOld);
1552 CHECKVAL(fRc, false, "%d");
1553 CHECKVAL(*phEvt, (RTSEMEVENT)(uintptr_t)42, "%p");
1554 CHECKVAL(hEvtOld, (RTSEMEVENT)(uintptr_t)42, "%p");
1555
1556 ASMAtomicCmpXchgExHandle(phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, (RTSEMEVENT)(uintptr_t)42, fRc, &hEvtOld);
1557 CHECKVAL(fRc, true, "%d");
1558 CHECKVAL(*phEvt, (RTSEMEVENT)~(uintptr_t)0x12380964, "%p");
1559 CHECKVAL(hEvtOld, (RTSEMEVENT)(uintptr_t)42, "%p");
1560
1561 void * volatile *ppv = (void * volatile *)pu64;
1562 void *pvOld;
1563 CHECK_OP_AND_VAL_EX2(bool, "%d", "%p", ppv, pvOld, ASMAtomicCmpXchgExPtrVoid(ppv, (void *)(intptr_t)12345678, NULL, &pvOld), false, (void *)~(uintptr_t)0x12380964, (void *)~(uintptr_t)0x12380964);
1564 CHECK_OP_AND_VAL_EX2(bool, "%d", "%p", ppv, pvOld, ASMAtomicCmpXchgExPtrVoid(ppv, (void *)(intptr_t)12345678, (void *)~(uintptr_t)0x12380964, &pvOld), true, (void *)(intptr_t)12345678, (void *)~(uintptr_t)0x12380964);
1565
1566 CHECK_OP_AND_VAL_EX2(bool, "%d", "%p", ppv, pvOld, ASMAtomicCmpXchgExPtr(ppv, (void *)~(uintptr_t)99, (void *)~(uintptr_t)99, &pvOld), false, (void *)(intptr_t)12345678, (void *)(intptr_t)12345678);
1567 CHECK_OP_AND_VAL_EX2(bool, "%d", "%p", ppv, pvOld, ASMAtomicCmpXchgExPtr(ppv, (void *)~(uintptr_t)99, (void *)(intptr_t)12345678, &pvOld), true, (void *)~(intptr_t)99, (void *)(intptr_t)12345678);
1568#endif
1569}
1570
1571
/**
 * Dispatches the compare-and-exchange-with-old-value worker tests
 * via the DO_SIMPLE_TEST harness.
 */
static void tstASMAtomicCmpXchgEx(void)
{
    DO_SIMPLE_TEST(ASMAtomicCmpXchgExU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicCmpXchgExU64, uint64_t);
}
1577
1578
/** Invokes a_Function(a_pVar, a_uVal), snapshotting *a_pVar first, and checks
 * that the call returned that previous value and that the variable now equals
 * a_VarExpect; reports a failure via RTTestFailed otherwise.  a_Fmt must match
 * a_Type exactly (varargs). */
#define TEST_RET_OLD(a_Type, a_Fmt, a_pVar, a_Function, a_uVal, a_VarExpect) do { \
        a_Type const uOldExpect = *(a_pVar); \
        a_Type uOldRet = a_Function(a_pVar, a_uVal); \
        if (RT_LIKELY( uOldRet == (uOldExpect) && *(a_pVar) == (a_VarExpect) )) { } \
        else RTTestFailed(g_hTest, "%s, %d: FAILURE: %s(%s," a_Fmt ") -> " a_Fmt ", expected " a_Fmt "; %s=" a_Fmt ", expected " a_Fmt "\n", \
                          __FUNCTION__, __LINE__, #a_Function, #a_pVar, a_uVal, uOldRet, uOldExpect, #a_pVar, *(a_pVar), (a_VarExpect)); \
    } while (0)
1586
1587
/**
 * Exercises ASMAtomicAddU32/ASMAtomicSubU32, checking the returned old value
 * and the new variable content after every call (wrap-around included).
 *
 * @param   pu32    The variable to operate on; initial content is overwritten.
 */
DECLINLINE(void) tstASMAtomicAddU32Worker(uint32_t *pu32)
{
    *pu32 = 10;
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, 1, 11);
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, UINT32_C(0xfffffffe), 9); /* adding -2 via wrap */
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, UINT32_C(0xfffffff7), 0);
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, UINT32_C(0x7fffffff), UINT32_C(0x7fffffff));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, 1, UINT32_C(0x80000000));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, 1, UINT32_C(0x80000001));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, UINT32_C(0x7fffffff), 0);
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAddU32, 0, 0);

    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicSubU32, 0, 0);
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicSubU32, 32, UINT32_C(0xffffffe0));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicSubU32, UINT32_C(0x7fffffff), UINT32_C(0x7fffffe1));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicSubU32, UINT32_C(0x7fffffde), UINT32_C(0x00000003));
}
1605
1606
/**
 * Exercises ASMAtomicAddS32/ASMAtomicSubS32, checking the returned old value
 * and the new variable content after every call.
 *
 * @param   pi32    The variable to operate on; initial content is overwritten.
 */
DECLINLINE(void) tstASMAtomicAddS32Worker(int32_t *pi32)
{
    *pi32 = 10;
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, 1, 11);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, -2, 9);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, -9, 0);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, -0x7fffffff, -0x7fffffff);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, 0, -0x7fffffff);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, 0x7fffffff, 0);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicAddS32, 0, 0);

    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicSubS32, 0, 0);
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicSubS32, 1, -1);
    /* -1 - INT32_MIN wraps around to INT32_MAX. */
    TEST_RET_OLD(int32_t, "%d", pi32, ASMAtomicSubS32, INT32_MIN, INT32_MAX);
}
1622
1623
/**
 * Exercises ASMAtomicAddU64/ASMAtomicSubU64, checking the returned old value
 * and the new variable content after every call (wrap-around included).
 *
 * @param   pu64    The variable to operate on; initial content is overwritten.
 */
DECLINLINE(void) tstASMAtomicAddU64Worker(uint64_t volatile *pu64)
{
    *pu64 = 10;
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, 1, 11);
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0xfffffffffffffffe), UINT64_C(0x0000000000000009)); /* adding -2 via wrap */
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0xfffffffffffffff7), UINT64_C(0x0000000000000000));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0x7ffffffffffffff0), UINT64_C(0x7ffffffffffffff0));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0x7ffffffffffffff0), UINT64_C(0xffffffffffffffe0));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0x0000000000000000), UINT64_C(0xffffffffffffffe0));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0x000000000000001f), UINT64_C(0xffffffffffffffff));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicAddU64, UINT64_C(0x0000000000000001), UINT64_C(0x0000000000000000));

    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicSubU64, UINT64_C(0x0000000000000000), UINT64_C(0x0000000000000000));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicSubU64, UINT64_C(0x0000000000000020), UINT64_C(0xffffffffffffffe0));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicSubU64, UINT64_C(0x7fffffffffffffff), UINT64_C(0x7fffffffffffffe1));
    TEST_RET_OLD(uint64_t, "%llx", pu64, ASMAtomicSubU64, UINT64_C(0x7fffffffffffffdd), UINT64_C(0x0000000000000004));
}
1641
1642
1643DECLINLINE(void) tstASMAtomicAddS64Worker(int64_t volatile *pi64)
1644{
1645 *pi64 = 10;
1646 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, 1, 11);
1647 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, -2, 9);
1648 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, -9, 0);
1649 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, -INT64_MAX, -INT64_MAX);
1650 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, 0, -INT64_MAX);
1651 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, -1, INT64_MIN);
1652 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, INT64_MAX, -1);
1653 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, 1, 0);
1654 TEST_RET_OLD(int64_t, "%lld", pi64, ASMAtomicAddS64, 0, 0);
1655
1656 TEST_RET_OLD(int64_t, "%d", pi64, ASMAtomicSubS64, 0, 0);
1657 TEST_RET_OLD(int64_t, "%d", pi64, ASMAtomicSubS64, 1, -1);
1658 TEST_RET_OLD(int64_t, "%d", pi64, ASMAtomicSubS64, INT64_MIN, INT64_MAX);
1659}
1660
1661
1662
/**
 * Exercises ASMAtomicAddZ/ASMAtomicSubZ (size_t width), checking the returned
 * old value and the new variable content after every call.
 *
 * @param   pcb     The variable to operate on; initial content is overwritten.
 */
DECLINLINE(void) tstASMAtomicAddZWorker(size_t volatile *pcb)
{
    *pcb = 10;
    TEST_RET_OLD(size_t, "%zx", pcb, ASMAtomicAddZ, 1, 11);
    TEST_RET_OLD(size_t, "%zx", pcb, ASMAtomicAddZ, ~(size_t)1, 9); /* adding -2 via wrap */
    TEST_RET_OLD(size_t, "%zx", pcb, ASMAtomicAddZ, ~(size_t)8, 0);

    TEST_RET_OLD(size_t, "%zx", pcb, ASMAtomicSubZ, 0, 0);
    TEST_RET_OLD(size_t, "%zx", pcb, ASMAtomicSubZ, 10, ~(size_t)9);
}
1673
/**
 * Dispatches the atomic add/sub worker tests for each type
 * via the DO_SIMPLE_TEST harness.
 */
static void tstASMAtomicAdd(void)
{
    DO_SIMPLE_TEST(ASMAtomicAddU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicAddS32, int32_t);
    DO_SIMPLE_TEST(ASMAtomicAddU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicAddS64, int64_t);
    DO_SIMPLE_TEST(ASMAtomicAddZ, size_t);
}
1682
1683
/** Invokes a_Function(a_pVar) (no value argument) and checks that both the
 * returned new value and the variable equal a_VarExpect; reports a failure
 * via RTTestFailed otherwise.  a_Fmt must match a_Type exactly (varargs). */
#define TEST_RET_NEW_NV(a_Type, a_Fmt, a_pVar, a_Function, a_VarExpect) do { \
        a_Type uNewRet = a_Function(a_pVar); \
        if (RT_LIKELY( uNewRet == (a_VarExpect) && *(a_pVar) == (a_VarExpect) )) { } \
        else RTTestFailed(g_hTest, "%s, %d: FAILURE: %s(%s) -> " a_Fmt " and %s=" a_Fmt ", expected both " a_Fmt "\n", \
                          __FUNCTION__, __LINE__, #a_Function, #a_pVar, uNewRet, #a_pVar, *(a_pVar), (a_VarExpect)); \
    } while (0)
1690
1691
/**
 * Exercises ASMAtomicDecU32/ASMAtomicIncU32, checking the returned new value
 * and the variable after every call, including wrap through zero.
 *
 * @param   pu32    The variable to operate on; initial content is overwritten.
 */
DECLINLINE(void) tstASMAtomicDecIncU32Worker(uint32_t volatile *pu32)
{
    *pu32 = 3;
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, 0);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, UINT32_MAX); /* wraps below zero */
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, UINT32_MAX - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, UINT32_MAX - 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, UINT32_MAX - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, UINT32_MAX);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, 0); /* wraps above UINT32_MAX */
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, 2);
    *pu32 = _1M;
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicDecU32, _1M - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, _1M);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicIncU32, _1M + 1);
}
1713
/**
 * Same sequence as tstASMAtomicDecIncU32Worker but for the unordered
 * ASMAtomicUoDecU32/ASMAtomicUoIncU32 variants.
 *
 * @param   pu32    The variable to operate on; initial content is overwritten.
 */
DECLINLINE(void) tstASMAtomicUoDecIncU32Worker(uint32_t volatile *pu32)
{
    *pu32 = 3;
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, 0);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, UINT32_MAX); /* wraps below zero */
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, UINT32_MAX - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, UINT32_MAX - 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, UINT32_MAX - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, UINT32_MAX);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, 0); /* wraps above UINT32_MAX */
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, 2);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, 2);
    *pu32 = _1M;
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoDecU32, _1M - 1);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, _1M);
    TEST_RET_NEW_NV(uint32_t, "%#x", pu32, ASMAtomicUoIncU32, _1M + 1);
}
1735
1736
/**
 * Exercises ASMAtomicDecS32/ASMAtomicIncS32, checking the returned new value
 * and the variable after every call, crossing zero in both directions.
 *
 * @param   pi32    The variable to operate on; initial content is overwritten.
 */
DECLINLINE(void) tstASMAtomicDecIncS32Worker(int32_t volatile *pi32)
{
    *pi32 = 10;
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 9);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 8);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 7);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 6);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 5);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 4);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 3);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 0);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, -1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, -2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, -1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 0);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 3);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 3);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, 3);
    *pi32 = INT32_MAX;
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicDecS32, INT32_MAX - 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, INT32_MAX);
    /* The test expects two's complement wrap-around from INT32_MAX. */
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicIncS32, INT32_MIN);
}
1766
1767
#if 0
/* NOTE(review): disabled block - presumably awaiting ASMAtomicUoDecS32 /
   ASMAtomicUoIncS32 implementations; confirm before enabling (see the
   commented-out DO_SIMPLE_TEST line in tstASMAtomicDecInc). */
DECLINLINE(void) tstASMAtomicUoDecIncS32Worker(int32_t volatile *pi32)
{
    *pi32 = 10;
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 9);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 8);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 7);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 6);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 5);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 4);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 3);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 0);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, -1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, -2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, -1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 0);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 3);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 3);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, 2);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, 3);
    *pi32 = INT32_MAX;
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoDecS32, INT32_MAX - 1);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, INT32_MAX);
    TEST_RET_NEW_NV(int32_t, "%d", pi32, ASMAtomicUoIncS32, INT32_MIN);
}
#endif
1799
1800
1801DECLINLINE(void) tstASMAtomicDecIncU64Worker(uint64_t volatile *pu64)
1802{
1803 *pu64 = 3;
1804 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, 2);
1805 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, 1);
1806 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, 0);
1807 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, UINT64_MAX);
1808 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, UINT64_MAX - 1);
1809 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, UINT64_MAX - 2);
1810 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, UINT64_MAX - 1);
1811 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, UINT64_MAX);
1812 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, 0);
1813 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, 1);
1814 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, 2);
1815 *pu64 = _4G - 1;
1816 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, _4G - 2);
1817 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, _4G - 1);
1818 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, _4G);
1819 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicIncU64, _4G + 1);
1820 TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicDecU64, _4G);
1821}
1822
1823
#if 0
/* NOTE(review): disabled block - presumably awaiting ASMAtomicUoDecU64 /
   ASMAtomicUoIncU64 implementations; confirm before enabling (see the
   commented-out DO_SIMPLE_TEST line in tstASMAtomicDecInc). */
DECLINLINE(void) tstASMAtomicUoDecIncU64Worker(uint64_t volatile *pu64)
{
    *pu64 = 3;
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, 2);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, 0);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, UINT64_MAX);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, UINT64_MAX - 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, UINT64_MAX - 2);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, UINT64_MAX - 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, UINT64_MAX);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, 0);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, 2);
    *pu64 = _4G - 1;
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, _4G - 2);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, _4G - 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, _4G);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoIncU64, _4G + 1);
    TEST_RET_NEW_NV(uint64_t, "%lld", pu64, ASMAtomicUoDecU64, _4G);
}
#endif
1847
1848
/**
 * Exercises ASMAtomicDecS64/ASMAtomicIncS64, checking the returned new value
 * and the variable after every call, crossing zero in both directions.
 *
 * @param   pi64    The variable to operate on; initial content is overwritten.
 */
DECLINLINE(void) tstASMAtomicDecIncS64Worker(int64_t volatile *pi64)
{
    *pi64 = 10;
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 9);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 8);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 7);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 6);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 5);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 4);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 3);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 0);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, -1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, -2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, -1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 0);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 3);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 3);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicIncS64, 3);
    *pi64 = INT64_MAX;
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicDecS64, INT64_MAX - 1);
}
1876
1877
#if 0
/* NOTE(review): disabled block - presumably awaiting ASMAtomicUoDecS64 /
   ASMAtomicUoIncS64 implementations; confirm before enabling (see the
   commented-out DO_SIMPLE_TEST line in tstASMAtomicDecInc). */
DECLINLINE(void) tstASMAtomicUoDecIncS64Worker(int64_t volatile *pi64)
{
    *pi64 = 10;
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 9);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 8);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 7);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 6);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 5);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 4);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 3);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 0);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, -1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, -2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, -1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 0);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 1);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 3);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 3);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, 2);
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoIncS64, 3);
    *pi64 = INT64_MAX;
    TEST_RET_NEW_NV(int64_t, "%lld", pi64, ASMAtomicUoDecS64, INT64_MAX - 1);
}
#endif
1907
1908
/**
 * Exercises ASMAtomicDecZ/ASMAtomicIncZ (size_t width), checking the returned
 * new value and the variable after every call.
 *
 * @param   pcb     The variable to operate on; initial content is overwritten.
 */
DECLINLINE(void) tstASMAtomicDecIncZWorker(size_t volatile *pcb)
{
    /* A large value away from both 0 and the maximum, so no wrap occurs. */
    size_t const uBaseVal = ~(size_t)0 >> 7;
    *pcb = uBaseVal;
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicDecZ, uBaseVal - 1);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicDecZ, uBaseVal - 2);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicDecZ, uBaseVal - 3);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicIncZ, uBaseVal - 2);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicIncZ, uBaseVal - 1);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicIncZ, uBaseVal);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicIncZ, uBaseVal + 1);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicDecZ, uBaseVal);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicDecZ, uBaseVal - 1);
    TEST_RET_NEW_NV(size_t, "%zx", pcb, ASMAtomicIncZ, uBaseVal);
}
1924
1925
/**
 * Dispatches the atomic increment/decrement worker tests via the
 * DO_SIMPLE_TEST harness.  The commented-out lines correspond to the
 * '#if 0' workers above.
 */
static void tstASMAtomicDecInc(void)
{
    DO_SIMPLE_TEST(ASMAtomicDecIncU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicUoDecIncU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicDecIncS32, int32_t);
    //DO_SIMPLE_TEST(ASMAtomicUoDecIncS32, int32_t);
    DO_SIMPLE_TEST(ASMAtomicDecIncU64, uint64_t);
    //DO_SIMPLE_TEST(ASMAtomicUoDecIncU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicDecIncS64, int64_t);
    //DO_SIMPLE_TEST(ASMAtomicUoDecIncS64, int64_t);
    DO_SIMPLE_TEST(ASMAtomicDecIncZ, size_t);
}
1938
1939
/** Invokes a_Function(a_pVar, a_uVal), ignoring any return value, and checks
 * that the variable now equals a_VarExpect; reports a failure via
 * RTTestFailed otherwise.  a_Fmt must match a_Type exactly (varargs). */
#define TEST_RET_VOID(a_Type, a_Fmt, a_pVar, a_Function, a_uVal, a_VarExpect) do { \
        a_Function(a_pVar, a_uVal); \
        if (RT_LIKELY( *(a_pVar) == (a_VarExpect) )) { } \
        else RTTestFailed(g_hTest, "%s, %d: FAILURE: %s(%s, " a_Fmt ") -> %s=" a_Fmt ", expected " a_Fmt "\n", \
                          __FUNCTION__, __LINE__, #a_Function, #a_pVar, a_uVal, #a_pVar, *(a_pVar), (a_VarExpect)); \
    } while (0)
1946
/** Invokes a_Function(a_pVar, a_uVal) and checks that both the returned new
 * value and the variable equal a_VarExpect; reports a failure via
 * RTTestFailed otherwise.  a_Fmt must match a_Type exactly (varargs). */
#define TEST_RET_NEW(a_Type, a_Fmt, a_pVar, a_Function, a_uVal, a_VarExpect) do { \
        a_Type uNewRet = a_Function(a_pVar, a_uVal); \
        if (RT_LIKELY( uNewRet == (a_VarExpect) && *(a_pVar) == (a_VarExpect) )) { } \
        else RTTestFailed(g_hTest, "%s, %d: FAILURE: %s(%s, " a_Fmt ") -> " a_Fmt " and %s=" a_Fmt ", expected both " a_Fmt "\n", \
                          __FUNCTION__, __LINE__, #a_Function, #a_pVar, a_uVal, uNewRet, #a_pVar, *(a_pVar), (a_VarExpect)); \
    } while (0)
1953
1954
/**
 * Exercises ASMAtomicOrU32/ASMAtomicAndU32/ASMAtomicXorU32 (void return),
 * checking the variable content after every call.  Each expected value
 * depends on the preceding line.
 *
 * @param   pu32    The variable to operate on; initial content is overwritten.
 */
DECLINLINE(void) tstASMAtomicAndOrXorU32Worker(uint32_t volatile *pu32)
{
    *pu32 = UINT32_C(0xffffffff);
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicOrU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(0x8f8f8f8f), UINT32_C(0x8f8f8f8f));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicOrU32, UINT32_C(0x70707070), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(1), UINT32_C(1));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicOrU32, UINT32_C(0x80000000), UINT32_C(0x80000001));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(0x80000000), UINT32_C(0x80000000));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(0), UINT32_C(0));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicOrU32, UINT32_C(0x42424242), UINT32_C(0x42424242));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicAndU32, UINT32_C(0x00ff0f00), UINT32_C(0x00420200));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicXorU32, UINT32_C(0x42004042), UINT32_C(0x42424242));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicXorU32, UINT32_C(0xff024200), UINT32_C(0xbd400042));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicXorU32, UINT32_C(0x00000000), UINT32_C(0xbd400042)); /* XOR with 0 is a no-op */
}
1972
1973
/**
 * Same sequence as tstASMAtomicAndOrXorU32Worker but for the unordered
 * ASMAtomicUoOrU32/ASMAtomicUoAndU32/ASMAtomicUoXorU32 variants.
 *
 * @param   pu32    The variable to operate on; initial content is overwritten.
 */
DECLINLINE(void) tstASMAtomicUoAndOrXorU32Worker(uint32_t volatile *pu32)
{
    *pu32 = UINT32_C(0xffffffff);
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoOrU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(0x8f8f8f8f), UINT32_C(0x8f8f8f8f));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoOrU32, UINT32_C(0x70707070), UINT32_C(0xffffffff));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(1), UINT32_C(1));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoOrU32, UINT32_C(0x80000000), UINT32_C(0x80000001));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(0x80000000), UINT32_C(0x80000000));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(0), UINT32_C(0));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoOrU32, UINT32_C(0x42424242), UINT32_C(0x42424242));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoAndU32, UINT32_C(0x00ff0f00), UINT32_C(0x00420200));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoXorU32, UINT32_C(0x42004042), UINT32_C(0x42424242));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoXorU32, UINT32_C(0xff024200), UINT32_C(0xbd400042));
    TEST_RET_VOID(uint32_t, "%#x", pu32, ASMAtomicUoXorU32, UINT32_C(0x00000000), UINT32_C(0xbd400042)); /* XOR with 0 is a no-op */
}
1991
1992
/**
 * Exercises the value-returning ASMAtomicOrExU32/AndExU32/XorExU32 variants,
 * again with the same vectors as tstASMAtomicAndOrXorU32Worker.
 *
 * TEST_RET_OLD (macro defined earlier in this file) additionally validates
 * the value returned by the operation -- the old value, per the macro name.
 *
 * @param   pu32    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicAndOrXorExU32Worker(uint32_t volatile *pu32)
{
    *pu32 = UINT32_C(0xffffffff);
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicOrExU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(0xffffffff), UINT32_C(0xffffffff));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(0x8f8f8f8f), UINT32_C(0x8f8f8f8f));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicOrExU32, UINT32_C(0x70707070), UINT32_C(0xffffffff));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(1), UINT32_C(1));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicOrExU32, UINT32_C(0x80000000), UINT32_C(0x80000001));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(0x80000000), UINT32_C(0x80000000));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(0), UINT32_C(0));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicOrExU32, UINT32_C(0x42424242), UINT32_C(0x42424242));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicAndExU32, UINT32_C(0x00ff0f00), UINT32_C(0x00420200));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicXorExU32, UINT32_C(0x42004042), UINT32_C(0x42424242));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicXorExU32, UINT32_C(0xff024200), UINT32_C(0xbd400042));
    TEST_RET_OLD(uint32_t, "%#x", pu32, ASMAtomicXorExU32, UINT32_C(0x00000000), UINT32_C(0xbd400042));
}
2010
2011
/**
 * Exercises ASMAtomicOrU64/AndU64 on @a pu64, first with the 32-bit vector
 * set (operands fit in the low dword), then with full 64-bit operands.
 *
 * The XorU64 lines are commented out -- presumably because ASMAtomicXorU64
 * is not available/implemented yet; TODO confirm and enable.
 *
 * @param   pu64    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicAndOrXorU64Worker(uint64_t volatile *pu64)
{
    *pu64 = UINT64_C(0xffffffff);
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x8f8f8f8f), UINT64_C(0x8f8f8f8f));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x70707070), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(1), UINT64_C(1));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x80000000), UINT64_C(0x80000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x80000000), UINT64_C(0x80000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x42424242), UINT64_C(0x42424242));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x00ff0f00), UINT64_C(0x00420200));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0x42004042), UINT64_C(0x42424242));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0xff024200), UINT64_C(0xbd400042));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0x00000000), UINT64_C(0xbd400042));

    /* full 64-bit */
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x0000000000000000), UINT64_C(0x0000000000000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x8f8f8f8f8f8f8f8f), UINT64_C(0x8f8f8f8f8f8f8f8f));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x7070707070707070), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x0000000000000001), UINT64_C(0x0000000000000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicOrU64, UINT64_C(0x4242424242424242), UINT64_C(0x4242424242424242));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicAndU64, UINT64_C(0x00ff0f00ff0f0000), UINT64_C(0x0042020042020000));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0x4200404242040000), UINT64_C(0x4242424242420000));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0xff02420000ff2127), UINT64_C(0xbd40004242bd2127));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicXorU64, UINT64_C(0x0000000000000000), UINT64_C(0xbd40004242bd2127));
}
2045
2046
/**
 * Exercises the unordered ASMAtomicUoOrU64/UoAndU64 variants with the same
 * vectors as tstASMAtomicAndOrXorU64Worker (32-bit operands first, then full
 * 64-bit operands).
 *
 * The UoXorU64 lines are commented out -- presumably because that variant is
 * not available/implemented yet; TODO confirm and enable.
 *
 * @param   pu64    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicUoAndOrXorU64Worker(uint64_t volatile *pu64)
{
    *pu64 = UINT64_C(0xffffffff);
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x8f8f8f8f), UINT64_C(0x8f8f8f8f));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x70707070), UINT64_C(0xffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(1), UINT64_C(1));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x80000000), UINT64_C(0x80000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x80000000), UINT64_C(0x80000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x42424242), UINT64_C(0x42424242));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x00ff0f00), UINT64_C(0x00420200));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0x42004042), UINT64_C(0x42424242));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0xff024200), UINT64_C(0xbd400042));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0x00000000), UINT64_C(0xbd400042));

    /* full 64-bit */
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x0000000000000000), UINT64_C(0x0000000000000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x8f8f8f8f8f8f8f8f), UINT64_C(0x8f8f8f8f8f8f8f8f));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x7070707070707070), UINT64_C(0xffffffffffffffff));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x0000000000000001), UINT64_C(0x0000000000000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000001));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000000));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoOrU64, UINT64_C(0x4242424242424242), UINT64_C(0x4242424242424242));
    TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoAndU64, UINT64_C(0x00ff0f00ff0f0000), UINT64_C(0x0042020042020000));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0x4200404242040000), UINT64_C(0x4242424242420000));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0xff02420000ff2127), UINT64_C(0xbd40004242bd2127));
    //TEST_RET_VOID(uint64_t, "%#llx", pu64, ASMAtomicUoXorU64, UINT64_C(0x0000000000000000), UINT64_C(0xbd40004242bd2127));
}
2080
2081
#if 0 /* Disabled -- presumably because the ASMAtomic*ExU64 variants do not
       * exist yet (cf. the commented out DO_SIMPLE_TEST below); TODO confirm. */
/**
 * Exercises the value-returning ASMAtomicOrExU64/AndExU64/XorExU64 variants
 * with the same vectors as tstASMAtomicAndOrXorU64Worker.
 *
 * @param   pu64    The variable to operate on.
 */
DECLINLINE(void) tstASMAtomicAndOrXorExU64Worker(uint64_t volatile *pu64)
{
    *pu64 = UINT64_C(0xffffffff);
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0xffffffff), UINT64_C(0xffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x8f8f8f8f), UINT64_C(0x8f8f8f8f));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x70707070), UINT64_C(0xffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(1), UINT64_C(1));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x80000000), UINT64_C(0x80000001));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x80000000), UINT64_C(0x80000000));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x42424242), UINT64_C(0x42424242));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x00ff0f00), UINT64_C(0x00420200));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0x42004042), UINT64_C(0x42424242));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0xff024200), UINT64_C(0xbd400042));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0x00000000), UINT64_C(0xbd400042));

    /* full 64-bit */
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x0000000000000000), UINT64_C(0x0000000000000000));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0xffffffffffffffff), UINT64_C(0xffffffffffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x8f8f8f8f8f8f8f8f), UINT64_C(0x8f8f8f8f8f8f8f8f));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x7070707070707070), UINT64_C(0xffffffffffffffff));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x0000000000000001), UINT64_C(0x0000000000000001));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000001));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x8000000000000000), UINT64_C(0x8000000000000000));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0), UINT64_C(0));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicOrExU64, UINT64_C(0x4242424242424242), UINT64_C(0x4242424242424242));
    TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicAndExU64, UINT64_C(0x00ff0f00ff0f0000), UINT64_C(0x0042020042020000));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0x4200404242040000), UINT64_C(0x4242424242420000));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0xff02420000ff2127), UINT64_C(0xbd40004242bd2127));
    //TEST_RET_OLD(uint64_t, "%#llx", pu64, ASMAtomicXorExU64, UINT64_C(0x0000000000000000), UINT64_C(0xbd40004242bd2127));
}
#endif
2117
2118
/**
 * Entry point for the atomic AND/OR/XOR tests: drives the workers above on
 * variously allocated variables via the DO_SIMPLE_TEST machinery (defined
 * earlier in this file).
 */
static void tstASMAtomicAndOrXor(void)
{
    DO_SIMPLE_TEST(ASMAtomicAndOrXorU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicUoAndOrXorU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicAndOrXorExU32, uint32_t);
    DO_SIMPLE_TEST(ASMAtomicAndOrXorU64, uint64_t);
    DO_SIMPLE_TEST(ASMAtomicUoAndOrXorU64, uint64_t);
    //DO_SIMPLE_TEST(ASMAtomicAndOrXorExU64, uint64_t); /* worker is #if 0'ed out above */
}
2128
2129
/** A page sized buffer for the ASMMemZeroPage / ASMMemIsZeroPage tests. */
typedef struct
{
    uint8_t ab[PAGE_SIZE];
} TSTPAGE;
2134
2135
2136DECLINLINE(void) tstASMMemZeroPageWorker(TSTPAGE *pPage)
2137{
2138 for (unsigned j = 0; j < 16; j++)
2139 {
2140 memset(pPage, 0x11 * j, sizeof(*pPage));
2141 ASMMemZeroPage(pPage);
2142 for (unsigned i = 0; i < sizeof(pPage->ab); i++)
2143 if (pPage->ab[i])
2144 RTTestFailed(g_hTest, "ASMMemZeroPage didn't clear byte at offset %#x!\n", i);
2145 if (ASMMemIsZeroPage(pPage) != true)
2146 RTTestFailed(g_hTest, "ASMMemIsZeroPage returns false after ASMMemZeroPage!\n");
2147 if (ASMMemFirstMismatchingU32(pPage, sizeof(pPage), 0) != NULL)
2148 RTTestFailed(g_hTest, "ASMMemFirstMismatchingU32(,,0) returns non-NULL after ASMMemZeroPage!\n");
2149 }
2150}
2151
2152
/**
 * Entry point for the ASMMemZeroPage test, running the worker on the buffer
 * variants provided by DO_SIMPLE_TEST_NO_SUB_NO_STACK (the NO_STACK variant
 * presumably skips stack buffers since they cannot be page aligned -- the
 * macro is defined earlier in this file).
 */
static void tstASMMemZeroPage(void)
{
    RTTestISub("ASMMemZeroPage");
    DO_SIMPLE_TEST_NO_SUB_NO_STACK(tstASMMemZeroPageWorker, TSTPAGE);
}
2158
2159
2160void tstASMMemIsZeroPage(RTTEST hTest)
2161{
2162 RTTestSub(hTest, "ASMMemIsZeroPage");
2163
2164 void *pvPage1 = RTTestGuardedAllocHead(hTest, PAGE_SIZE);
2165 void *pvPage2 = RTTestGuardedAllocTail(hTest, PAGE_SIZE);
2166 RTTESTI_CHECK_RETV(pvPage1 && pvPage2);
2167
2168 memset(pvPage1, 0, PAGE_SIZE);
2169 memset(pvPage2, 0, PAGE_SIZE);
2170 RTTESTI_CHECK(ASMMemIsZeroPage(pvPage1));
2171 RTTESTI_CHECK(ASMMemIsZeroPage(pvPage2));
2172
2173 memset(pvPage1, 0xff, PAGE_SIZE);
2174 memset(pvPage2, 0xff, PAGE_SIZE);
2175 RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage1));
2176 RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage2));
2177
2178 memset(pvPage1, 0, PAGE_SIZE);
2179 memset(pvPage2, 0, PAGE_SIZE);
2180 for (unsigned off = 0; off < PAGE_SIZE; off++)
2181 {
2182 ((uint8_t *)pvPage1)[off] = 1;
2183 RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage1));
2184 ((uint8_t *)pvPage1)[off] = 0;
2185
2186 ((uint8_t *)pvPage2)[off] = 0x80;
2187 RTTESTI_CHECK(!ASMMemIsZeroPage(pvPage2));
2188 ((uint8_t *)pvPage2)[off] = 0;
2189 }
2190
2191 RTTestSubDone(hTest);
2192}
2193
2194
/**
 * Tests ASMMemFirstMismatchingU8 (plus ASMMemIsZero/ASMMemIsAllU8) on two
 * guarded page buffers, covering matching/mismatching fills, short tail
 * sub-ranges, and an exhaustive sweep of alignments, lengths and mismatch
 * positions.
 *
 * @param   hTest   The test handle.
 */
void tstASMMemFirstMismatchingU8(RTTEST hTest)
{
    RTTestSub(hTest, "ASMMemFirstMismatchingU8");

    uint8_t *pbPage1 = (uint8_t *)RTTestGuardedAllocHead(hTest, PAGE_SIZE);
    uint8_t *pbPage2 = (uint8_t *)RTTestGuardedAllocTail(hTest, PAGE_SIZE);
    RTTESTI_CHECK_RETV(pbPage1 && pbPage2);

    /* All zeros: must match 0 and mismatch everything else at offset 0. */
    memset(pbPage1, 0, PAGE_SIZE);
    memset(pbPage2, 0, PAGE_SIZE);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 0) == NULL);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 0) == NULL);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 1) == pbPage1);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 1) == pbPage2);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 0x87) == pbPage1);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 0x87) == pbPage2);
    RTTESTI_CHECK(ASMMemIsZero(pbPage1, PAGE_SIZE));
    RTTESTI_CHECK(ASMMemIsZero(pbPage2, PAGE_SIZE));
    RTTESTI_CHECK(ASMMemIsAllU8(pbPage1, PAGE_SIZE, 0));
    RTTESTI_CHECK(ASMMemIsAllU8(pbPage2, PAGE_SIZE, 0));
    RTTESTI_CHECK(!ASMMemIsAllU8(pbPage1, PAGE_SIZE, 0x34));
    RTTESTI_CHECK(!ASMMemIsAllU8(pbPage2, PAGE_SIZE, 0x88));
    /* Short lengths (0..31) at the buffer start and right up against the
       tail guard page, so the sub-dword/byte loops get exercised. */
    unsigned cbSub = 32;
    while (cbSub-- > 0)
    {
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage1[PAGE_SIZE - cbSub], cbSub, 0) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage2[PAGE_SIZE - cbSub], cbSub, 0) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, cbSub, 0) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, cbSub, 0) == NULL);

        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage1[PAGE_SIZE - cbSub], cbSub, 0x34) == &pbPage1[PAGE_SIZE - cbSub] || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage2[PAGE_SIZE - cbSub], cbSub, 0x99) == &pbPage2[PAGE_SIZE - cbSub] || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, cbSub, 0x42) == pbPage1 || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, cbSub, 0x88) == pbPage2 || !cbSub);
    }

    /* Same again with an all-0xff fill. */
    memset(pbPage1, 0xff, PAGE_SIZE);
    memset(pbPage2, 0xff, PAGE_SIZE);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 0xff) == NULL);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 0xff) == NULL);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, PAGE_SIZE, 0xfe) == pbPage1);
    RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, PAGE_SIZE, 0xfe) == pbPage2);
    RTTESTI_CHECK(!ASMMemIsZero(pbPage1, PAGE_SIZE));
    RTTESTI_CHECK(!ASMMemIsZero(pbPage2, PAGE_SIZE));
    RTTESTI_CHECK(ASMMemIsAllU8(pbPage1, PAGE_SIZE, 0xff));
    RTTESTI_CHECK(ASMMemIsAllU8(pbPage2, PAGE_SIZE, 0xff));
    RTTESTI_CHECK(!ASMMemIsAllU8(pbPage1, PAGE_SIZE, 0));
    RTTESTI_CHECK(!ASMMemIsAllU8(pbPage2, PAGE_SIZE, 0));
    cbSub = 32;
    while (cbSub-- > 0)
    {
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage1[PAGE_SIZE - cbSub], cbSub, 0xff) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage2[PAGE_SIZE - cbSub], cbSub, 0xff) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, cbSub, 0xff) == NULL);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, cbSub, 0xff) == NULL);

        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage1[PAGE_SIZE - cbSub], cbSub, 0xfe) == &pbPage1[PAGE_SIZE - cbSub] || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(&pbPage2[PAGE_SIZE - cbSub], cbSub, 0xfe) == &pbPage2[PAGE_SIZE - cbSub] || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage1, cbSub, 0xfe) == pbPage1 || !cbSub);
        RTTESTI_CHECK(ASMMemFirstMismatchingU8(pbPage2, cbSub, 0xfe) == pbPage2 || !cbSub);
    }


    /*
     * Various alignments and sizes.
     */
    uint8_t const bFiller1 = 0x00;
    uint8_t const bFiller2 = 0xf6;
    size_t const cbBuf = 128;
    uint8_t *pbBuf1 = pbPage1;
    uint8_t *pbBuf2 = &pbPage2[PAGE_SIZE - cbBuf]; /* Put it up against the tail guard */
    memset(pbPage1, ~bFiller1, PAGE_SIZE);
    memset(pbPage2, ~bFiller2, PAGE_SIZE);
    memset(pbBuf1, bFiller1, cbBuf);
    memset(pbBuf2, bFiller2, cbBuf);
    for (size_t offNonZero = 0; offNonZero < cbBuf; offNonZero++)
    {
        /* Plant one random byte that differs from the filler at offNonZero
           (the |1 resp. |0x80^0xf6 massaging guarantees a mismatch). */
        uint8_t bRand = (uint8_t)RTRandU32();
        pbBuf1[offNonZero] = bRand | 1;
        pbBuf2[offNonZero] = (0x80 | bRand) ^ 0xf6;

        for (size_t offStart = 0; offStart < 32; offStart++)
        {
            size_t const cbMax = cbBuf - offStart;
            for (size_t cb = 0; cb < cbMax; cb++)
            {
                size_t const offEnd = offStart + cb;
                uint8_t bSaved1, bSaved2;
                if (offEnd < PAGE_SIZE)
                {
                    /* Poison the byte just past the range to catch overreads. */
                    bSaved1 = pbBuf1[offEnd];
                    bSaved2 = pbBuf2[offEnd];
                    pbBuf1[offEnd] = 0xff;
                    pbBuf2[offEnd] = 0xff;
                }
#ifdef _MSC_VER                 /* simple stupid compiler warnings */
                else
                    bSaved1 = bSaved2 = 0;
#endif

                /* Note: offNonZero - offStart wraps (size_t) when offStart is
                   past the planted byte, making the '< cb' test correctly
                   select the expected-NULL branch. */
                uint8_t *pbRet = (uint8_t *)ASMMemFirstMismatchingU8(pbBuf1 + offStart, cb, bFiller1);
                RTTESTI_CHECK(offNonZero - offStart < cb ? pbRet == &pbBuf1[offNonZero] : pbRet == NULL);

                pbRet = (uint8_t *)ASMMemFirstMismatchingU8(pbBuf2 + offStart, cb, bFiller2);
                RTTESTI_CHECK(offNonZero - offStart < cb ? pbRet == &pbBuf2[offNonZero] : pbRet == NULL);

                if (offEnd < PAGE_SIZE)
                {
                    pbBuf1[offEnd] = bSaved1;
                    pbBuf2[offEnd] = bSaved2;
                }
            }
        }

        /* Restore the filler byte before moving the mismatch position on. */
        pbBuf1[offNonZero] = 0;
        pbBuf2[offNonZero] = 0xf6;
    }

    RTTestSubDone(hTest);
}
2315
2316
/** A 1536 byte buffer (384 dwords) for the ASMMemZero32/ASMMemFill32 tests. */
typedef struct TSTBUF32 { uint32_t au32[384]; } TSTBUF32;
2318
/**
 * Worker for tstASMMemZero32: zeroes the buffer twice (once as handed in,
 * once after a 0xfe fill) and verifies each time via a dword scan,
 * ASMMemFirstNonZero and ASMMemIsZero.
 *
 * @param   pBuf    The buffer to test with.
 */
DECLINLINE(void) tstASMMemZero32Worker(TSTBUF32 *pBuf)
{
    ASMMemZero32(pBuf, sizeof(*pBuf));
    for (unsigned i = 0; i < RT_ELEMENTS(pBuf->au32); i++)
        if (pBuf->au32[i])
            RTTestFailed(g_hTest, "ASMMemZero32 didn't clear dword at index %#x!\n", i);
    if (ASMMemFirstNonZero(pBuf, sizeof(*pBuf)) != NULL)
        RTTestFailed(g_hTest, "ASMMemFirstNonZero return non-NULL after ASMMemZero32\n");
    if (!ASMMemIsZero(pBuf, sizeof(*pBuf)))
        RTTestFailed(g_hTest, "ASMMemIsZero return false after ASMMemZero32\n");

    /* Again with a non-zero fill so we know the zeroing actually did work. */
    memset(pBuf, 0xfe, sizeof(*pBuf));
    ASMMemZero32(pBuf, sizeof(*pBuf));
    for (unsigned i = 0; i < RT_ELEMENTS(pBuf->au32); i++)
        if (pBuf->au32[i])
            RTTestFailed(g_hTest, "ASMMemZero32 didn't clear dword at index %#x!\n", i);
    if (ASMMemFirstNonZero(pBuf, sizeof(*pBuf)) != NULL)
        RTTestFailed(g_hTest, "ASMMemFirstNonZero return non-NULL after ASMMemZero32\n");
    if (!ASMMemIsZero(pBuf, sizeof(*pBuf)))
        RTTestFailed(g_hTest, "ASMMemIsZero return false after ASMMemZero32\n");
}
2340
2341
/**
 * Tests ASMMemZero32: zeroes three magic-fenced buffers and checks that the
 * buffers are cleared without the fences before/after being touched, then
 * runs the worker on the DO_SIMPLE_TEST buffer variants.
 */
void tstASMMemZero32(void)
{
    RTTestSub(g_hTest, "ASMMemZero32");

    /* Buffers bracketed by magic values to detect out-of-bounds writes. */
    struct
    {
        uint64_t u64Magic1;
        uint8_t abPage[PAGE_SIZE - 32];
        uint64_t u64Magic2;
    } Buf1, Buf2, Buf3;

    Buf1.u64Magic1 = UINT64_C(0xffffffffffffffff);
    memset(Buf1.abPage, 0x55, sizeof(Buf1.abPage));
    Buf1.u64Magic2 = UINT64_C(0xffffffffffffffff);
    Buf2.u64Magic1 = UINT64_C(0xffffffffffffffff);
    memset(Buf2.abPage, 0x77, sizeof(Buf2.abPage));
    Buf2.u64Magic2 = UINT64_C(0xffffffffffffffff);
    Buf3.u64Magic1 = UINT64_C(0xffffffffffffffff);
    memset(Buf3.abPage, 0x99, sizeof(Buf3.abPage));
    Buf3.u64Magic2 = UINT64_C(0xffffffffffffffff);
    ASMMemZero32(Buf1.abPage, sizeof(Buf1.abPage));
    ASMMemZero32(Buf2.abPage, sizeof(Buf2.abPage));
    ASMMemZero32(Buf3.abPage, sizeof(Buf3.abPage));
    if (    Buf1.u64Magic1 != UINT64_C(0xffffffffffffffff)
        ||  Buf1.u64Magic2 != UINT64_C(0xffffffffffffffff)
        ||  Buf2.u64Magic1 != UINT64_C(0xffffffffffffffff)
        ||  Buf2.u64Magic2 != UINT64_C(0xffffffffffffffff)
        ||  Buf3.u64Magic1 != UINT64_C(0xffffffffffffffff)
        ||  Buf3.u64Magic2 != UINT64_C(0xffffffffffffffff))
    {
        RTTestFailed(g_hTest, "ASMMemZero32 violated one/both magic(s)!\n");
    }
    for (unsigned i = 0; i < RT_ELEMENTS(Buf1.abPage); i++)
        if (Buf1.abPage[i])
            RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i);
    for (unsigned i = 0; i < RT_ELEMENTS(Buf2.abPage); i++)
        if (Buf2.abPage[i])
            RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i);
    for (unsigned i = 0; i < RT_ELEMENTS(Buf3.abPage); i++)
        if (Buf3.abPage[i])
            RTTestFailed(g_hTest, "ASMMemZero32 didn't clear byte at offset %#x!\n", i);

    DO_SIMPLE_TEST_NO_SUB(tstASMMemZero32Worker, TSTBUF32);
}
2386
2387
/**
 * Worker for tstASMMemFill32: fills the buffer with two different dword
 * values (second time over a 0xfe byte fill) and verifies each via a dword
 * scan and ASMMemFirstMismatchingU32.
 *
 * @param   pBuf    The buffer to test with.
 */
DECLINLINE(void) tstASMMemFill32Worker(TSTBUF32 *pBuf)
{
    ASMMemFill32(pBuf, sizeof(*pBuf), UINT32_C(0xf629bce1));
    for (unsigned i = 0; i < RT_ELEMENTS(pBuf->au32); i++)
        if (pBuf->au32[i] != UINT32_C(0xf629bce1))
            RTTestFailed(g_hTest, "ASMMemFill32 didn't set dword at index %#x correctly!\n", i);
    if (ASMMemFirstMismatchingU32(pBuf, sizeof(*pBuf), UINT32_C(0xf629bce1)) != NULL)
        RTTestFailed(g_hTest, "ASMMemFirstMismatchingU32(,,UINT32_C(0xf629bce1)) returns non-NULL after ASMMemFill32!\n");

    memset(pBuf, 0xfe, sizeof(*pBuf));
    ASMMemFill32(pBuf, sizeof(*pBuf), UINT32_C(0x12345678));
    for (unsigned i = 0; i < RT_ELEMENTS(pBuf->au32); i++)
        if (pBuf->au32[i] != UINT32_C(0x12345678))
            RTTestFailed(g_hTest, "ASMMemFill32 didn't set dword at index %#x correctly!\n", i);
    if (ASMMemFirstMismatchingU32(pBuf, sizeof(*pBuf), UINT32_C(0x12345678)) != NULL)
        RTTestFailed(g_hTest, "ASMMemFirstMismatchingU32(,,UINT32_C(0x12345678)) returns non-NULL after ASMMemFill32!\n");
}
2405
2406void tstASMMemFill32(void)
2407{
2408 RTTestSub(g_hTest, "ASMMemFill32");
2409
2410 struct
2411 {
2412 uint64_t u64Magic1;
2413 uint32_t au32Page[PAGE_SIZE / 4];
2414 uint64_t u64Magic2;
2415 } Buf1;
2416 struct
2417 {
2418 uint64_t u64Magic1;
2419 uint32_t au32Page[(PAGE_SIZE / 4) - 3];
2420 uint64_t u64Magic2;
2421 } Buf2;
2422 struct
2423 {
2424 uint64_t u64Magic1;
2425 uint32_t au32Page[(PAGE_SIZE / 4) - 1];
2426 uint64_t u64Magic2;
2427 } Buf3;
2428
2429 Buf1.u64Magic1 = UINT64_C(0xffffffffffffffff);
2430 memset(Buf1.au32Page, 0x55, sizeof(Buf1.au32Page));
2431 Buf1.u64Magic2 = UINT64_C(0xffffffffffffffff);
2432 Buf2.u64Magic1 = UINT64_C(0xffffffffffffffff);
2433 memset(Buf2.au32Page, 0x77, sizeof(Buf2.au32Page));
2434 Buf2.u64Magic2 = UINT64_C(0xffffffffffffffff);
2435 Buf3.u64Magic1 = UINT64_C(0xffffffffffffffff);
2436 memset(Buf3.au32Page, 0x99, sizeof(Buf3.au32Page));
2437 Buf3.u64Magic2 = UINT64_C(0xffffffffffffffff);
2438 ASMMemFill32(Buf1.au32Page, sizeof(Buf1.au32Page), 0xdeadbeef);
2439 ASMMemFill32(Buf2.au32Page, sizeof(Buf2.au32Page), 0xcafeff01);
2440 ASMMemFill32(Buf3.au32Page, sizeof(Buf3.au32Page), 0xf00dd00f);
2441 if ( Buf1.u64Magic1 != UINT64_C(0xffffffffffffffff)
2442 || Buf1.u64Magic2 != UINT64_C(0xffffffffffffffff)
2443 || Buf2.u64Magic1 != UINT64_C(0xffffffffffffffff)
2444 || Buf2.u64Magic2 != UINT64_C(0xffffffffffffffff)
2445 || Buf3.u64Magic1 != UINT64_C(0xffffffffffffffff)
2446 || Buf3.u64Magic2 != UINT64_C(0xffffffffffffffff))
2447 RTTestFailed(g_hTest, "ASMMemFill32 violated one/both magic(s)!\n");
2448 for (unsigned i = 0; i < RT_ELEMENTS(Buf1.au32Page); i++)
2449 if (Buf1.au32Page[i] != 0xdeadbeef)
2450 RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf1.au32Page[i], 0xdeadbeef);
2451 for (unsigned i = 0; i < RT_ELEMENTS(Buf2.au32Page); i++)
2452 if (Buf2.au32Page[i] != 0xcafeff01)
2453 RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf2.au32Page[i], 0xcafeff01);
2454 for (unsigned i = 0; i < RT_ELEMENTS(Buf3.au32Page); i++)
2455 if (Buf3.au32Page[i] != 0xf00dd00f)
2456 RTTestFailed(g_hTest, "ASMMemFill32 %#x: %#x exepcted %#x\n", i, Buf3.au32Page[i], 0xf00dd00f);
2457
2458 DO_SIMPLE_TEST_NO_SUB(tstASMMemFill32Worker, TSTBUF32);
2459}
2460
2461
2462void tstASMProbe(RTTEST hTest)
2463{
2464 RTTestSub(hTest, "ASMProbeReadByte/Buffer");
2465
2466 uint8_t b = 42;
2467 RTTESTI_CHECK(ASMProbeReadByte(&b) == 42);
2468 ASMProbeReadBuffer(&b, sizeof(b));
2469
2470 for (uint32_t cPages = 1; cPages < 16; cPages++)
2471 {
2472 uint8_t *pbBuf1 = (uint8_t *)RTTestGuardedAllocHead(hTest, cPages * PAGE_SIZE);
2473 uint8_t *pbBuf2 = (uint8_t *)RTTestGuardedAllocTail(hTest, cPages * PAGE_SIZE);
2474 RTTESTI_CHECK_RETV(pbBuf1 && pbBuf2);
2475
2476 memset(pbBuf1, 0xf6, cPages * PAGE_SIZE);
2477 memset(pbBuf2, 0x42, cPages * PAGE_SIZE);
2478
2479 RTTESTI_CHECK(ASMProbeReadByte(&pbBuf1[cPages * PAGE_SIZE - 1]) == 0xf6);
2480 RTTESTI_CHECK(ASMProbeReadByte(&pbBuf2[cPages * PAGE_SIZE - 1]) == 0x42);
2481 RTTESTI_CHECK(ASMProbeReadByte(&pbBuf1[0]) == 0xf6);
2482 RTTESTI_CHECK(ASMProbeReadByte(&pbBuf2[0]) == 0x42);
2483
2484 ASMProbeReadBuffer(pbBuf1, cPages * PAGE_SIZE);
2485 ASMProbeReadBuffer(pbBuf2, cPages * PAGE_SIZE);
2486 }
2487}
2488
2489
2490void tstASMMisc(void)
2491{
2492 RTTestSub(g_hTest, "Misc");
2493 for (uint32_t i = 0; i < 20; i++)
2494 {
2495 ASMWriteFence();
2496 ASMCompilerBarrier();
2497 ASMReadFence();
2498 ASMNopPause();
2499 ASMSerializeInstruction();
2500 ASMMemoryFence();
2501 }
2502}
2503
2504void tstASMMath(void)
2505{
2506 RTTestSub(g_hTest, "Math");
2507
2508 uint64_t u64 = ASMMult2xU32RetU64(UINT32_C(0x80000000), UINT32_C(0x10000000));
2509 CHECKVAL(u64, UINT64_C(0x0800000000000000), "%#018RX64");
2510
2511 uint32_t u32 = ASMDivU64ByU32RetU32(UINT64_C(0x0800000000000000), UINT32_C(0x10000000));
2512 CHECKVAL(u32, UINT32_C(0x80000000), "%#010RX32");
2513
2514 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x00000001), UINT32_C(0x00000001), UINT32_C(0x00000001));
2515 CHECKVAL(u32, UINT32_C(0x00000001), "%#018RX32");
2516 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x10000000), UINT32_C(0x80000000), UINT32_C(0x20000000));
2517 CHECKVAL(u32, UINT32_C(0x40000000), "%#018RX32");
2518 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x76543210), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
2519 CHECKVAL(u32, UINT32_C(0x76543210), "%#018RX32");
2520 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0xffffffff), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
2521 CHECKVAL(u32, UINT32_C(0xffffffff), "%#018RX32");
2522 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0xffffffff), UINT32_C(0xfffffff0), UINT32_C(0xffffffff));
2523 CHECKVAL(u32, UINT32_C(0xfffffff0), "%#018RX32");
2524 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x10359583), UINT32_C(0x58734981), UINT32_C(0xf8694045));
2525 CHECKVAL(u32, UINT32_C(0x05c584ce), "%#018RX32");
2526 u32 = ASMMultU32ByU32DivByU32(UINT32_C(0x10359583), UINT32_C(0xf8694045), UINT32_C(0x58734981));
2527 CHECKVAL(u32, UINT32_C(0x2d860795), "%#018RX32");
2528
2529#if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
2530 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x0000000000000001), UINT32_C(0x00000001), UINT32_C(0x00000001));
2531 CHECKVAL(u64, UINT64_C(0x0000000000000001), "%#018RX64");
2532 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x0000000100000000), UINT32_C(0x80000000), UINT32_C(0x00000002));
2533 CHECKVAL(u64, UINT64_C(0x4000000000000000), "%#018RX64");
2534 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xfedcba9876543210), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
2535 CHECKVAL(u64, UINT64_C(0xfedcba9876543210), "%#018RX64");
2536 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xffffffffffffffff), UINT32_C(0xffffffff), UINT32_C(0xffffffff));
2537 CHECKVAL(u64, UINT64_C(0xffffffffffffffff), "%#018RX64");
2538 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xffffffffffffffff), UINT32_C(0xfffffff0), UINT32_C(0xffffffff));
2539 CHECKVAL(u64, UINT64_C(0xfffffff0fffffff0), "%#018RX64");
2540 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x3415934810359583), UINT32_C(0x58734981), UINT32_C(0xf8694045));
2541 CHECKVAL(u64, UINT64_C(0x128b9c3d43184763), "%#018RX64");
2542 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0x3415934810359583), UINT32_C(0xf8694045), UINT32_C(0x58734981));
2543 CHECKVAL(u64, UINT64_C(0x924719355cd35a27), "%#018RX64");
2544
2545# if 0 /* bird: question is whether this should trap or not:
2546 *
2547 * frank: Of course it must trap:
2548 *
2549 * 0xfffffff8 * 0x77d7daf8 = 0x77d7daf441412840
2550 *
2551 * During the following division, the quotient must fit into a 32-bit register.
2552 * Therefore the smallest valid divisor is
2553 *
2554 * (0x77d7daf441412840 >> 32) + 1 = 0x77d7daf5
2555 *
2556 * which is definitely greater than 0x3b9aca00.
2557 *
2558 * bird: No, the C version does *not* crash. So, the question is whether there's any
2559 * code depending on it not crashing.
2560 *
2561 * Of course the assembly versions of the code crash right now for the reasons you've
2562 * given, but the 32-bit MSC version does not crash.
2563 *
2564 * frank: The C version does not crash but delivers incorrect results for this case.
2565 * The reason is
2566 *
2567 * u.s.Hi = (unsigned long)(u64Hi / u32C);
2568 *
2569 * Here the division is actually 64-bit by 64-bit but the 64-bit result is truncated
2570 * to 32 bit. If using this (optimized and fast) function we should just be sure that
2571 * the operands are in a valid range.
2572 */
2573 u64 = ASMMultU64ByU32DivByU32(UINT64_C(0xfffffff8c65d6731), UINT32_C(0x77d7daf8), UINT32_C(0x3b9aca00));
2574 CHECKVAL(u64, UINT64_C(0x02b8f9a2aa74e3dc), "%#018RX64");
2575# endif
2576#endif /* AMD64 || X86 */
2577
2578 u32 = ASMModU64ByU32RetU32(UINT64_C(0x0ffffff8c65d6731), UINT32_C(0x77d7daf8));
2579 CHECKVAL(u32, UINT32_C(0x3B642451), "%#010RX32");
2580
2581 int32_t i32;
2582 i32 = ASMModS64ByS32RetS32(INT64_C(-11), INT32_C(-2));
2583 CHECKVAL(i32, INT32_C(-1), "%010RI32");
2584 i32 = ASMModS64ByS32RetS32(INT64_C(-11), INT32_C(2));
2585 CHECKVAL(i32, INT32_C(-1), "%010RI32");
2586 i32 = ASMModS64ByS32RetS32(INT64_C(11), INT32_C(-2));
2587 CHECKVAL(i32, INT32_C(1), "%010RI32");
2588
2589 i32 = ASMModS64ByS32RetS32(INT64_C(92233720368547758), INT32_C(2147483647));
2590 CHECKVAL(i32, INT32_C(2104533974), "%010RI32");
2591 i32 = ASMModS64ByS32RetS32(INT64_C(-92233720368547758), INT32_C(2147483647));
2592 CHECKVAL(i32, INT32_C(-2104533974), "%010RI32");
2593}
2594
2595
/**
 * Tests ASMByteSwapU64, ASMByteSwapU32 and ASMByteSwapU16.
 *
 * For each width: swap a value with distinct byte lanes, check the input was
 * not modified and the output matches the expected byte-reversed constant,
 * then swap the output again to verify the round trip restores the original.
 * The all-zero and all-ones patterns are invariant under byte swapping and
 * must come back unchanged.
 */
void tstASMByteSwap(void)
{
    RTTestSub(g_hTest, "ASMByteSwap*");

    /* 64-bit: every byte lane holds a distinct value so a misplaced byte is caught. */
    uint64_t u64In = UINT64_C(0x0011223344556677);
    uint64_t u64Out = ASMByteSwapU64(u64In);
    CHECKVAL(u64In, UINT64_C(0x0011223344556677), "%#018RX64"); /* input must be untouched */
    CHECKVAL(u64Out, UINT64_C(0x7766554433221100), "%#018RX64");
    u64Out = ASMByteSwapU64(u64Out);
    CHECKVAL(u64Out, u64In, "%#018RX64"); /* swapping twice is the identity */
    u64In = UINT64_C(0x0123456789abcdef);
    u64Out = ASMByteSwapU64(u64In);
    CHECKVAL(u64In, UINT64_C(0x0123456789abcdef), "%#018RX64");
    CHECKVAL(u64Out, UINT64_C(0xefcdab8967452301), "%#018RX64");
    u64Out = ASMByteSwapU64(u64Out);
    CHECKVAL(u64Out, u64In, "%#018RX64");
    u64In = 0; /* swap-invariant pattern */
    u64Out = ASMByteSwapU64(u64In);
    CHECKVAL(u64Out, u64In, "%#018RX64");
    u64In = UINT64_MAX; /* swap-invariant pattern */
    u64Out = ASMByteSwapU64(u64In);
    CHECKVAL(u64Out, u64In, "%#018RX64");

    /* 32-bit: same pattern as above. */
    uint32_t u32In = UINT32_C(0x00112233);
    uint32_t u32Out = ASMByteSwapU32(u32In);
    CHECKVAL(u32In, UINT32_C(0x00112233), "%#010RX32");
    CHECKVAL(u32Out, UINT32_C(0x33221100), "%#010RX32");
    u32Out = ASMByteSwapU32(u32Out);
    CHECKVAL(u32Out, u32In, "%#010RX32");
    u32In = UINT32_C(0x12345678);
    u32Out = ASMByteSwapU32(u32In);
    CHECKVAL(u32In, UINT32_C(0x12345678), "%#010RX32");
    CHECKVAL(u32Out, UINT32_C(0x78563412), "%#010RX32");
    u32Out = ASMByteSwapU32(u32Out);
    CHECKVAL(u32Out, u32In, "%#010RX32");
    u32In = 0;
    u32Out = ASMByteSwapU32(u32In);
    CHECKVAL(u32Out, u32In, "%#010RX32");
    u32In = UINT32_MAX;
    u32Out = ASMByteSwapU32(u32In);
    CHECKVAL(u32Out, u32In, "%#010RX32");

    /* 16-bit: same pattern as above. */
    uint16_t u16In = UINT16_C(0x0011);
    uint16_t u16Out = ASMByteSwapU16(u16In);
    CHECKVAL(u16In, UINT16_C(0x0011), "%#06RX16");
    CHECKVAL(u16Out, UINT16_C(0x1100), "%#06RX16");
    u16Out = ASMByteSwapU16(u16Out);
    CHECKVAL(u16Out, u16In, "%#06RX16");
    u16In = UINT16_C(0x1234);
    u16Out = ASMByteSwapU16(u16In);
    CHECKVAL(u16In, UINT16_C(0x1234), "%#06RX16");
    CHECKVAL(u16Out, UINT16_C(0x3412), "%#06RX16");
    u16Out = ASMByteSwapU16(u16Out);
    CHECKVAL(u16Out, u16In, "%#06RX16");
    u16In = 0;
    u16Out = ASMByteSwapU16(u16In);
    CHECKVAL(u16Out, u16In, "%#06RX16");
    u16In = UINT16_MAX;
    u16Out = ASMByteSwapU16(u16In);
    CHECKVAL(u16Out, u16In, "%#06RX16");
}
2657
2658
2659void tstASMBench(void)
2660{
2661 /*
2662 * Make this static. We don't want to have this located on the stack.
2663 */
2664 static uint8_t volatile s_u8;
2665 static int8_t volatile s_i8;
2666 static uint16_t volatile s_u16;
2667 static int16_t volatile s_i16;
2668 static uint32_t volatile s_u32;
2669 static int32_t volatile s_i32;
2670 static uint64_t volatile s_u64;
2671 static int64_t volatile s_i64;
2672 unsigned i;
2673 const unsigned cRounds = _16M; /* Must be multiple of 8 */
2674 uint64_t u64Elapsed;
2675
2676 RTTestSub(g_hTest, "Benchmarking");
2677
2678#if 0 && !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
2679# define BENCH(op, str) \
2680 do { \
2681 RTThreadYield(); \
2682 u64Elapsed = ASMReadTSC(); \
2683 for (i = cRounds; i > 0; i--) \
2684 op; \
2685 u64Elapsed = ASMReadTSC() - u64Elapsed; \
2686 RTTestValue(g_hTest, str, u64Elapsed / cRounds, RTTESTUNIT_TICKS_PER_CALL); \
2687 } while (0)
2688#else
2689# define BENCH(op, str) \
2690 do { \
2691 RTThreadYield(); \
2692 u64Elapsed = RTTimeNanoTS(); \
2693 for (i = cRounds / 8; i > 0; i--) \
2694 { \
2695 op; \
2696 op; \
2697 op; \
2698 op; \
2699 op; \
2700 op; \
2701 op; \
2702 op; \
2703 } \
2704 u64Elapsed = RTTimeNanoTS() - u64Elapsed; \
2705 RTTestValue(g_hTest, str, u64Elapsed / cRounds, RTTESTUNIT_NS_PER_CALL); \
2706 } while (0)
2707#endif
2708#if (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)) && !defined(GCC44_32BIT_PIC)
2709# define BENCH_TSC(op, str) \
2710 do { \
2711 RTThreadYield(); \
2712 u64Elapsed = ASMReadTSC(); \
2713 for (i = cRounds / 8; i > 0; i--) \
2714 { \
2715 op; \
2716 op; \
2717 op; \
2718 op; \
2719 op; \
2720 op; \
2721 op; \
2722 op; \
2723 } \
2724 u64Elapsed = ASMReadTSC() - u64Elapsed; \
2725 RTTestValue(g_hTest, str, u64Elapsed / cRounds, /*RTTESTUNIT_TICKS_PER_CALL*/ RTTESTUNIT_NONE); \
2726 } while (0)
2727#else
2728# define BENCH_TSC(op, str) BENCH(op, str)
2729#endif
2730
2731 BENCH(s_u32 = 0, "s_u32 = 0");
2732 BENCH(ASMAtomicUoReadU8(&s_u8), "ASMAtomicUoReadU8");
2733 BENCH(ASMAtomicUoReadS8(&s_i8), "ASMAtomicUoReadS8");
2734 BENCH(ASMAtomicUoReadU16(&s_u16), "ASMAtomicUoReadU16");
2735 BENCH(ASMAtomicUoReadS16(&s_i16), "ASMAtomicUoReadS16");
2736 BENCH(ASMAtomicUoReadU32(&s_u32), "ASMAtomicUoReadU32");
2737 BENCH(ASMAtomicUoReadS32(&s_i32), "ASMAtomicUoReadS32");
2738 BENCH(ASMAtomicUoReadU64(&s_u64), "ASMAtomicUoReadU64");
2739 BENCH(ASMAtomicUoReadS64(&s_i64), "ASMAtomicUoReadS64");
2740 BENCH(ASMAtomicReadU8(&s_u8), "ASMAtomicReadU8");
2741 BENCH(ASMAtomicReadS8(&s_i8), "ASMAtomicReadS8");
2742 BENCH(ASMAtomicReadU16(&s_u16), "ASMAtomicReadU16");
2743 BENCH(ASMAtomicReadS16(&s_i16), "ASMAtomicReadS16");
2744 BENCH(ASMAtomicReadU32(&s_u32), "ASMAtomicReadU32");
2745 BENCH(ASMAtomicReadS32(&s_i32), "ASMAtomicReadS32");
2746 BENCH(ASMAtomicReadU64(&s_u64), "ASMAtomicReadU64");
2747 BENCH(ASMAtomicReadS64(&s_i64), "ASMAtomicReadS64");
2748 BENCH(ASMAtomicUoWriteU8(&s_u8, 0), "ASMAtomicUoWriteU8");
2749 BENCH(ASMAtomicUoWriteS8(&s_i8, 0), "ASMAtomicUoWriteS8");
2750 BENCH(ASMAtomicUoWriteU16(&s_u16, 0), "ASMAtomicUoWriteU16");
2751 BENCH(ASMAtomicUoWriteS16(&s_i16, 0), "ASMAtomicUoWriteS16");
2752 BENCH(ASMAtomicUoWriteU32(&s_u32, 0), "ASMAtomicUoWriteU32");
2753 BENCH(ASMAtomicUoWriteS32(&s_i32, 0), "ASMAtomicUoWriteS32");
2754 BENCH(ASMAtomicUoWriteU64(&s_u64, 0), "ASMAtomicUoWriteU64");
2755 BENCH(ASMAtomicUoWriteS64(&s_i64, 0), "ASMAtomicUoWriteS64");
2756 BENCH(ASMAtomicWriteU8(&s_u8, 0), "ASMAtomicWriteU8");
2757 BENCH(ASMAtomicWriteS8(&s_i8, 0), "ASMAtomicWriteS8");
2758 BENCH(ASMAtomicWriteU16(&s_u16, 0), "ASMAtomicWriteU16");
2759 BENCH(ASMAtomicWriteS16(&s_i16, 0), "ASMAtomicWriteS16");
2760 BENCH(ASMAtomicWriteU32(&s_u32, 0), "ASMAtomicWriteU32");
2761 BENCH(ASMAtomicWriteS32(&s_i32, 0), "ASMAtomicWriteS32");
2762 BENCH(ASMAtomicWriteU64(&s_u64, 0), "ASMAtomicWriteU64");
2763 BENCH(ASMAtomicWriteS64(&s_i64, 0), "ASMAtomicWriteS64");
2764 BENCH(ASMAtomicXchgU8(&s_u8, 0), "ASMAtomicXchgU8");
2765 BENCH(ASMAtomicXchgS8(&s_i8, 0), "ASMAtomicXchgS8");
2766 BENCH(ASMAtomicXchgU16(&s_u16, 0), "ASMAtomicXchgU16");
2767 BENCH(ASMAtomicXchgS16(&s_i16, 0), "ASMAtomicXchgS16");
2768 BENCH(ASMAtomicXchgU32(&s_u32, 0), "ASMAtomicXchgU32");
2769 BENCH(ASMAtomicXchgS32(&s_i32, 0), "ASMAtomicXchgS32");
2770 BENCH(ASMAtomicXchgU64(&s_u64, 0), "ASMAtomicXchgU64");
2771 BENCH(ASMAtomicXchgS64(&s_i64, 0), "ASMAtomicXchgS64");
2772 BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 0), "ASMAtomicCmpXchgU32");
2773 BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 0), "ASMAtomicCmpXchgS32");
2774 BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 0), "ASMAtomicCmpXchgU64");
2775 BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 0), "ASMAtomicCmpXchgS64");
2776 BENCH(ASMAtomicCmpXchgU32(&s_u32, 0, 1), "ASMAtomicCmpXchgU32/neg");
2777 BENCH(ASMAtomicCmpXchgS32(&s_i32, 0, 1), "ASMAtomicCmpXchgS32/neg");
2778 BENCH(ASMAtomicCmpXchgU64(&s_u64, 0, 1), "ASMAtomicCmpXchgU64/neg");
2779 BENCH(ASMAtomicCmpXchgS64(&s_i64, 0, 1), "ASMAtomicCmpXchgS64/neg");
2780 BENCH(ASMAtomicIncU32(&s_u32), "ASMAtomicIncU32");
2781 BENCH(ASMAtomicIncS32(&s_i32), "ASMAtomicIncS32");
2782 BENCH(ASMAtomicDecU32(&s_u32), "ASMAtomicDecU32");
2783 BENCH(ASMAtomicDecS32(&s_i32), "ASMAtomicDecS32");
2784 BENCH(ASMAtomicAddU32(&s_u32, 5), "ASMAtomicAddU32");
2785 BENCH(ASMAtomicAddS32(&s_i32, 5), "ASMAtomicAddS32");
2786 BENCH(ASMAtomicUoIncU32(&s_u32), "ASMAtomicUoIncU32");
2787 BENCH(ASMAtomicUoDecU32(&s_u32), "ASMAtomicUoDecU32");
2788 BENCH(ASMAtomicUoAndU32(&s_u32, 0xffffffff), "ASMAtomicUoAndU32");
2789 BENCH(ASMAtomicUoOrU32(&s_u32, 0xffffffff), "ASMAtomicUoOrU32");
2790#if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
2791 BENCH_TSC(ASMSerializeInstructionCpuId(), "ASMSerializeInstructionCpuId");
2792 BENCH_TSC(ASMSerializeInstructionIRet(), "ASMSerializeInstructionIRet");
2793#endif
2794 BENCH(ASMReadFence(), "ASMReadFence");
2795 BENCH(ASMWriteFence(), "ASMWriteFence");
2796 BENCH(ASMMemoryFence(), "ASMMemoryFence");
2797 BENCH(ASMSerializeInstruction(), "ASMSerializeInstruction");
2798 BENCH(ASMNopPause(), "ASMNopPause");
2799
2800 /* The Darwin gcc does not like this ... */
2801#if !defined(RT_OS_DARWIN) && !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
2802 BENCH(s_u8 = ASMGetApicId(), "ASMGetApicId");
2803 BENCH(s_u32 = ASMGetApicIdExt0B(), "ASMGetApicIdExt0B");
2804 BENCH(s_u32 = ASMGetApicIdExt8000001E(), "ASMGetApicIdExt8000001E");
2805#endif
2806#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
2807 uint32_t uAux;
2808 if ( ASMHasCpuId()
2809 && ASMIsValidExtRange(ASMCpuId_EAX(0x80000000))
2810 && (ASMCpuId_EDX(0x80000001) & X86_CPUID_EXT_FEATURE_EDX_RDTSCP) )
2811 {
2812 BENCH_TSC(ASMSerializeInstructionRdTscp(), "ASMSerializeInstructionRdTscp");
2813 BENCH(s_u64 = ASMReadTscWithAux(&uAux), "ASMReadTscWithAux");
2814 }
2815 BENCH(s_u64 = ASMReadTSC(), "ASMReadTSC");
2816 union
2817 {
2818 uint64_t u64[2];
2819 RTIDTR Unaligned;
2820 struct
2821 {
2822 uint16_t abPadding[3];
2823 RTIDTR Aligned;
2824 } s;
2825 } uBuf;
2826 Assert(((uintptr_t)&uBuf.Unaligned.pIdt & (sizeof(uintptr_t) - 1)) != 0);
2827 BENCH(ASMGetIDTR(&uBuf.Unaligned), "ASMGetIDTR/unaligned");
2828 Assert(((uintptr_t)&uBuf.s.Aligned.pIdt & (sizeof(uintptr_t) - 1)) == 0);
2829 BENCH(ASMGetIDTR(&uBuf.s.Aligned), "ASMGetIDTR/aligned");
2830#endif
2831
2832#undef BENCH
2833}
2834
2835
/**
 * Testcase entry point: creates the IPRT test instance, runs each tstASM*
 * subtest in a fixed order, and returns the overall test status.
 *
 * @returns Test status code from RTTestSummaryAndDestroy (0 on success),
 *          or the RTTestInitAndCreate failure code.
 */
int main(int argc, char **argv)
{
    RT_NOREF_PV(argc); RT_NOREF_PV(argv); /* no command line arguments used */

    int rc = RTTestInitAndCreate("tstRTInlineAsm", &g_hTest);
    if (rc)
        return rc; /* bail out before touching g_hTest if creation failed */
    RTTestBanner(g_hTest);

    /*
     * Execute the tests.
     */
    /* CPUID probing only makes sense on x86/amd64 and is skipped for the
       broken gcc 4.4 32-bit PIC configuration (see GCC44_32BIT_PIC above). */
#if !defined(GCC44_32BIT_PIC) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86))
    tstASMCpuId();
    //bruteForceCpuId();
#endif
#if 1
    tstASMAtomicRead();
    tstASMAtomicWrite();
    tstASMAtomicXchg();
    tstASMAtomicCmpXchg();
    tstASMAtomicCmpXchgEx();

    tstASMAtomicAdd();
    tstASMAtomicDecInc();
    tstASMAtomicAndOrXor();

    tstASMMemZeroPage();
    tstASMMemIsZeroPage(g_hTest);
    tstASMMemFirstMismatchingU8(g_hTest);
    tstASMMemZero32();
    tstASMMemFill32();
    tstASMProbe(g_hTest);

    tstASMMisc();

    tstASMMath();

    tstASMByteSwap();

    tstASMBench();
#endif

    /*
     * Show the result.
     */
    return RTTestSummaryAndDestroy(g_hTest);
}
2884
注意: 瀏覽 TracBrowser 來幫助您使用儲存庫瀏覽器

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette