@@ -129,6 +129,11 @@ static inline unsigned long fetchadd64_acquire_release (unsigned long *ptr, unsi
     : [tmp] "=&r" (tmp), [old] "=&r" (old), [newval] "=&r" (newval), [ptr] "+Q" (*ptr)
     : [val] "r" (addend)
     : "memory");
+#elif defined(__riscv) && !defined(USE_BUILTIN)
+  asm volatile ("amoadd.d.aqrl %[old], %[val], %[ptr]"
+    : [old] "=&r" (old), [ptr] "+A" (*(ptr))
+    : [val] "r" (addend)
+    : "memory");
 #else
   old = __atomic_fetch_add (ptr, addend, __ATOMIC_ACQ_REL);
 #endif
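
Note (not part of the patch): the four fetchadd64 variants below differ only in the AMO ordering suffix. The RISC-V A extension encodes acquire/release semantics in the aq/rl bits, so amoadd.d, amoadd.d.aq, amoadd.d.rl and amoadd.d.aqrl correspond to the relaxed, acquire, release and acq_rel orders used in the #else builtin branches. A minimal sketch to sanity-check the equivalence (the function name is illustrative, not from the patch):

    /* Compile with e.g. -O2 -march=rv64gc and inspect the assembly; a GCC or
       Clang build with the A extension is expected to emit a single
       amoadd.d.aqrl here, matching the hand-written path above. */
    unsigned long fetchadd64_acq_rel_builtin_check (unsigned long *ptr, unsigned long addend)
    {
        return __atomic_fetch_add (ptr, addend, __ATOMIC_ACQ_REL);
    }
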
@@ -162,6 +167,11 @@ static inline unsigned long fetchadd64_acquire (unsigned long *ptr, unsigned lon
     : [tmp] "=&r" (tmp), [old] "=&r" (old), [newval] "=&r" (newval), [ptr] "+Q" (*ptr)
     : [val] "r" (addend)
     : "memory");
+#elif defined(__riscv) && !defined(USE_BUILTIN)
+  asm volatile ("amoadd.d.aq %[old], %[val], %[ptr]"
+    : [old] "=&r" (old), [ptr] "+A" (*(ptr))
+    : [val] "r" (addend)
+    : "memory");
 #else
   old = __atomic_fetch_add (ptr, addend, __ATOMIC_ACQUIRE);
 #endif
@@ -196,6 +206,11 @@ static inline unsigned long fetchadd64_release (unsigned long *ptr, unsigned lon
     : [tmp] "=&r" (tmp), [old] "=&r" (old), [newval] "=&r" (newval), [ptr] "+Q" (*ptr)
     : [val] "r" (addend)
     : "memory");
+#elif defined(__riscv) && !defined(USE_BUILTIN)
+  asm volatile ("amoadd.d.rl %[old], %[val], %[ptr]"
+    : [old] "=&r" (old), [ptr] "+A" (*(ptr))
+    : [val] "r" (addend)
+    : "memory");
 #else
   old = __atomic_fetch_add (ptr, addend, __ATOMIC_RELEASE);
 #endif
@@ -229,6 +244,11 @@ static inline unsigned long fetchadd64 (unsigned long *ptr, unsigned long addend
     : [tmp] "=&r" (tmp), [old] "=&r" (old), [newval] "=&r" (newval), [ptr] "+Q" (*ptr)
     : [val] "r" (addend)
     : "memory");
+#elif defined(__riscv) && !defined(USE_BUILTIN)
+  asm volatile ("amoadd.d %[old], %[val], %[ptr]"
+    : [old] "=&r" (old), [ptr] "+A" (*(ptr))
+    : [val] "r" (addend)
+    : "memory");
 #else
   old = __atomic_fetch_add (ptr, addend, __ATOMIC_RELAXED);
 #endif
@@ -265,6 +285,12 @@ static inline unsigned long fetchsub64 (unsigned long *ptr, unsigned long addend
     : [tmp] "=&r" (tmp), [old] "=&r" (old), [newval] "=&r" (newval), [ptr] "+Q" (*ptr)
     : [val] "r" (addend)
     : "memory");
+#elif defined(__riscv) && !defined(USE_BUILTIN)
+  addend = (unsigned long) (-(long) addend);
+  asm volatile ("amoadd.d %[old], %[val], %[ptr]"
+    : [old] "=&r" (old), [ptr] "+A" (*(ptr))
+    : [val] "r" (addend)
+    : "memory");
 #else
   old = __atomic_fetch_sub (ptr, addend, __ATOMIC_RELAXED);
 #endif
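
Note (not part of the patch): RISC-V has no amosub, so fetchsub64 negates the addend and reuses amoadd.d; since unsigned arithmetic wraps modulo 2^64, adding (unsigned long)(-(long)addend) is the same as subtracting addend. A tiny illustration, assuming the addend fits in a signed long (names are hypothetical):

    #include <assert.h>

    /* Adding the two's-complement negation of addend is equivalent to
       subtracting addend (mod 2^64). */
    static void check_sub_via_add (void)
    {
        unsigned long x = 10, addend = 3;
        unsigned long neg = (unsigned long) (-(long) addend);
        assert (x + neg == x - addend);   /* both evaluate to 7 */
    }
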
@@ -296,6 +322,11 @@ static inline unsigned long swap64 (unsigned long *ptr, unsigned long val) {
     : [tmp] "=&r" (tmp), [old] "=&r" (old), [ptr] "+Q" (*ptr)
     : [val] "r" (val)
     : "memory");
+#elif defined(__riscv) && !defined(USE_BUILTIN)
+  asm volatile ("amoswap.d.aqrl %[old], %[val], %[ptr]"
+    : [old] "=&r" (old), [ptr] "+A" (*(ptr))
+    : [val] "r" (val)
+    : "memory");
 #else
   old = __atomic_exchange_n (ptr, val, __ATOMIC_ACQ_REL);
 #endif
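
Note (not part of the patch): amoswap.d.aqrl makes swap64 a full read-modify-write with both acquire and release ordering, so it can stand in for a test-and-set primitive. An illustrative, hypothetical caller (assumes the swap64 declared above):

    static unsigned long lock_word;   /* 0 = free, 1 = held */

    static inline void spin_lock_via_swap64 (void)
    {
        while (swap64 (&lock_word, 1) != 0)
            ;   /* previous value was nonzero: lock already held, keep spinning */
    }

    static inline void spin_unlock_via_swap64 (void)
    {
        swap64 (&lock_word, 0);   /* aqrl ordering covers the releasing store */
    }
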
@@ -330,6 +361,22 @@ static inline unsigned long cas64 (unsigned long *ptr, unsigned long newval, uns
     : [tmp] "=&r" (tmp), [old] "=&r" (old), [ptr] "+Q" (*ptr)
     : [exp] "r" (expected), [val] "r" (newval)
     : "memory");
+#elif defined(__riscv) && !defined(USE_BUILTIN) && !defined(__riscv_zacas)
+  unsigned long tmp;
+
+  asm volatile ("1: lr.d %[old], %[ptr]\n"
+                "   bne %[old], %[exp], 2f\n"
+                "   sc.d %[tmp], %[val], %[ptr]\n"
+                "   bnez %[tmp], 1b\n"
+                "2:"
+    : [old] "=&r" (old), [tmp] "=&r" (tmp), [ptr] "+A" (*(ptr))
+    : [exp] "r" (expected), [val] "r" (newval)
+    : "memory");
+#elif defined(__riscv) && !defined(USE_BUILTIN) && defined(__riscv_zacas)
+  asm volatile ("amocas.d %[exp], %[val], %[ptr]"
+    : [exp] "=&r" (old), [ptr] "+A" (*(ptr))
+    : "[exp]" (expected), [val] "r" (newval)
+    : "memory");
 #else
   old = expected;
   __atomic_compare_exchange_n (ptr, &old, newval, true, __ATOMIC_RELAXED, __ATOMIC_RELAXED);
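
Note (not part of the patch): the cas64 family gets two RISC-V paths. Without the Zacas extension, compare-and-swap is built from an lr.d/sc.d retry loop (a failed sc.d branches back to label 1). When the toolchain advertises Zacas via the __riscv_zacas test macro, a single amocas.d is used; its destination operand carries the expected value in and the observed old value out. Either way cas64 returns the value it observed in *ptr, so a caller detects success by comparing against the expected value, roughly as in this hypothetical sketch:

    /* Claim a free (zero) slot with a nonzero id; argument order follows
       cas64 (ptr, newval, expected) as declared above. */
    static inline int try_claim_slot (unsigned long *slot, unsigned long id)
    {
        unsigned long expected = 0;
        return cas64 (slot, id, expected) == expected;
    }
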
@@ -365,6 +412,22 @@ static inline unsigned long cas64_acquire (unsigned long *ptr, unsigned long val
     : [tmp] "=&r" (tmp), [old] "=&r" (old), [ptr] "+Q" (*ptr)
     : [exp] "r" (exp), [val] "r" (val)
     : "memory");
+#elif defined(__riscv) && !defined(USE_BUILTIN) && !defined(__riscv_zacas)
+  unsigned long tmp;
+
+  asm volatile ("1: lr.d.aq %[old], %[ptr]\n"
+                "   bne %[old], %[exp], 2f\n"
+                "   sc.d %[tmp], %[newval], %[ptr]\n"
+                "   bnez %[tmp], 1b\n"
+                "2:"
+    : [old] "=&r" (old), [tmp] "=&r" (tmp), [ptr] "+A" (*(ptr))
+    : [exp] "r" (exp), [newval] "r" (val)
+    : "memory");
+#elif defined(__riscv) && !defined(USE_BUILTIN) && defined(__riscv_zacas)
+  asm volatile ("amocas.d.aq %[exp], %[val], %[ptr]"
+    : [exp] "=&r" (old), [ptr] "+A" (*(ptr))
+    : "[exp]" (exp), [val] "r" (val)
+    : "memory");
 #else
   old = exp;
   __atomic_compare_exchange_n (ptr, &old, val, true, __ATOMIC_ACQUIRE, __ATOMIC_ACQUIRE);
@@ -400,6 +463,22 @@ static inline unsigned long cas64_release (unsigned long *ptr, unsigned long val
     : [tmp] "=&r" (tmp), [old] "=&r" (old), [ptr] "+Q" (*ptr)
     : [exp] "r" (exp), [val] "r" (val)
     : "memory");
+#elif defined(__riscv) && !defined(USE_BUILTIN) && !defined(__riscv_zacas)
+  unsigned long tmp;
+
+  asm volatile ("1: lr.d %[old], %[ptr]\n"
+                "   bne %[old], %[exp], 2f\n"
+                "   sc.d.rl %[tmp], %[val], %[ptr]\n"
+                "   bnez %[tmp], 1b\n"
+                "2:"
+    : [old] "=&r" (old), [tmp] "=&r" (tmp), [ptr] "+A" (*(ptr))
+    : [exp] "r" (exp), [val] "r" (val)
+    : "memory");
+#elif defined(__riscv) && !defined(USE_BUILTIN) && defined(__riscv_zacas)
+  asm volatile ("amocas.d.rl %[exp], %[val], %[ptr]"
+    : [exp] "=&r" (old), [ptr] "+A" (*(ptr))
+    : "[exp]" (exp), [val] "r" (val)
+    : "memory");
 #else
   old = exp;
   __atomic_compare_exchange_n (ptr, &old, val, true, __ATOMIC_RELEASE, __ATOMIC_RELAXED); // XXX: is relaxed for failure OK?
@@ -435,6 +514,22 @@ static inline unsigned long cas64_acquire_release (unsigned long *ptr, unsigned
     : [tmp] "=&r" (tmp), [old] "=&r" (old), [ptr] "+Q" (*ptr)
     : [exp] "r" (exp), [val] "r" (val)
     : "memory");
+#elif defined(__riscv) && !defined(USE_BUILTIN) && !defined(__riscv_zacas)
+  unsigned long tmp;
+
+  asm volatile ("1: lr.d.aq %[old], %[ptr]\n"
+                "   bne %[old], %[exp], 2f\n"
+                "   sc.d.rl %[tmp], %[val], %[ptr]\n"
+                "   bnez %[tmp], 1b\n"
+                "2:"
+    : [old] "=&r" (old), [tmp] "=&r" (tmp), [ptr] "+A" (*(ptr))
+    : [exp] "r" (exp), [val] "r" (val)
+    : "memory");
+#elif defined(__riscv) && !defined(USE_BUILTIN) && defined(__riscv_zacas)
+  asm volatile ("amocas.d.aqrl %[exp], %[val], %[ptr]"
+    : [exp] "=&r" (old), [ptr] "+A" (*(ptr))
+    : "[exp]" (exp), [val] "r" (val)
+    : "memory");
 #else
   old = exp;
   __atomic_compare_exchange_n (ptr, &old, val, true, __ATOMIC_ACQ_REL,