DPDK 23.11.2 — Doxygen source listing of rte_atomic.h (generic memory
barriers and 16/32/64/128-bit atomic operation helpers).
1/* SPDX-License-Identifier: BSD-3-Clause
2 * Copyright(c) 2010-2014 Intel Corporation
3 */
4
5#ifndef _RTE_ATOMIC_H_
6#define _RTE_ATOMIC_H_
7
15#include <stdint.h>
16
17#include <rte_common.h>
18#include <rte_stdatomic.h>
19
#ifdef __DOXYGEN__

/* Documentation-only prototypes: the real implementations come from the
 * per-architecture headers.  The Doxygen comments were stripped from this
 * extraction; the summaries below restate the standard DPDK contracts. */

/** General memory barrier: LOADs and STOREs issued before the barrier are
 * globally visible before any LOAD/STORE issued after it. */
static inline void rte_mb(void);

/** Write memory barrier: orders STOREs before against STOREs after. */
static inline void rte_wmb(void);

/** Read memory barrier: orders LOADs before against LOADs after. */
static inline void rte_rmb(void);


/** General memory barrier between lcores (SMP scope). */
static inline void rte_smp_mb(void);

/** Write memory barrier between lcores. */
static inline void rte_smp_wmb(void);

/** Read memory barrier between lcores. */
static inline void rte_smp_rmb(void);


/** General memory barrier for I/O device memory accesses. */
static inline void rte_io_mb(void);

/** Write memory barrier for I/O device memory accesses. */
static inline void rte_io_wmb(void);

/** Read memory barrier for I/O device memory accesses. */
static inline void rte_io_rmb(void);

#endif /* __DOXYGEN__ */
143
/*
 * rte_compiler_barrier() — prevent the COMPILER from reordering memory
 * accesses across this point.  It emits no CPU fence instruction: MSVC uses
 * the _ReadWriteBarrier() intrinsic, other toolchains an empty asm statement
 * with a "memory" clobber.
 */
#ifdef RTE_TOOLCHAIN_MSVC
#define rte_compiler_barrier() _ReadWriteBarrier()
#else
#define rte_compiler_barrier() do { \
 asm volatile ("" : : : "memory"); \
} while(0)
#endif
157
161static inline void rte_atomic_thread_fence(rte_memory_order memorder);
162
163/*------------------------- 16 bit atomic operations -------------------------*/
164
165#ifndef RTE_TOOLCHAIN_MSVC
166
/**
 * Atomic 16-bit compare-and-set: if *dst equals exp, store src into *dst.
 *
 * @return Non-zero when the swap was performed, 0 otherwise.
 */
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src);
185
186#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
{
	/* Publish src only if *dst still holds exp; report whether it did. */
	const int swapped = __sync_bool_compare_and_swap(dst, exp, src);

	return swapped;
}
192#endif
193
/**
 * Atomically set *dst to val and return the value *dst held beforehand
 * (sequentially consistent).
 */
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint16_t
rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
{
 /* NOTE(review): sibling helpers in this file cast their operand to a
  * (volatile __rte_atomic ...) pointer before calling the
  * rte_atomic_*_explicit wrappers; dst is passed uncast here.  Possibly
  * dropped by the extraction — verify against the upstream header. */
 return rte_atomic_exchange_explicit(dst, val, rte_memory_order_seq_cst);
}
#endif
219
/**
 * 16-bit atomic counter wrapper.
 *
 * NOTE(review): the closing "} rte_atomic16_t;" line of this typedef was
 * lost in extraction; the type name comes from the cross-reference index at
 * the end of this listing.
 */
typedef struct {
 volatile int16_t cnt; /**< Internal counter value. */

/** Static initializer for an rte_atomic16_t. */
#define RTE_ATOMIC16_INIT(val) { (val) }
231
/*
 * NOTE(review): the extraction dropped the signature lines of the next two
 * definitions; per the cross-reference index they are
 * rte_atomic16_init(rte_atomic16_t *v) and
 * rte_atomic16_read(const rte_atomic16_t *v).
 */

/* Initialize the counter to zero (plain store, not an atomic RMW). */
static inline void
{
 v->cnt = 0;
}

/* Return the current counter value (plain volatile load). */
static inline int16_t
{
 return v->cnt;
}

/** Set the counter to new_value (plain volatile store). */
static inline void
rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
{
 v->cnt = new_value;
}
271
/*
 * NOTE(review): signature lines are missing below; per the index these are
 * rte_atomic16_add(rte_atomic16_t *v, int16_t inc) and
 * rte_atomic16_sub(rte_atomic16_t *v, int16_t dec).
 */

/* Atomically add inc to the counter (seq_cst); the result is discarded. */
static inline void
{
 rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, inc,
 rte_memory_order_seq_cst);
}

/* Atomically subtract dec from the counter (seq_cst); result discarded. */
static inline void
{
 rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, dec,
 rte_memory_order_seq_cst);
}
301
/*
 * NOTE(review): declaration and definition signature lines are missing
 * below; per the index these are rte_atomic16_inc(rte_atomic16_t *v) and
 * rte_atomic16_dec(rte_atomic16_t *v).
 */

/* Atomically increment the counter by one. */
static inline void

#ifdef RTE_FORCE_INTRINSICS
static inline void
{
 rte_atomic16_add(v, 1);
}
#endif

/* Atomically decrement the counter by one. */
static inline void

#ifdef RTE_FORCE_INTRINSICS
static inline void
{
 rte_atomic16_sub(v, 1);
}
#endif
335
/*
 * NOTE(review): signature lines are missing below; per the index these are
 * rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc) and
 * rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec).
 */

/* Atomically add inc and return the value AFTER the addition. */
static inline int16_t
{
 return rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, inc,
 rte_memory_order_seq_cst) + inc;
}

/* Atomically subtract dec and return the value AFTER the subtraction. */
static inline int16_t
{
 return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, dec,
 rte_memory_order_seq_cst) - dec;
}
376
389
390#ifdef RTE_FORCE_INTRINSICS
391static inline int rte_atomic16_inc_and_test(rte_atomic16_t *v)
392{
393 return rte_atomic_fetch_add_explicit((volatile __rte_atomic int16_t *)&v->cnt, 1,
394 rte_memory_order_seq_cst) + 1 == 0;
395}
396#endif
397
410
411#ifdef RTE_FORCE_INTRINSICS
412static inline int rte_atomic16_dec_and_test(rte_atomic16_t *v)
413{
414 return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int16_t *)&v->cnt, 1,
415 rte_memory_order_seq_cst) - 1 == 0;
416}
417#endif
418
431
432#ifdef RTE_FORCE_INTRINSICS
433static inline int rte_atomic16_test_and_set(rte_atomic16_t *v)
434{
435 return rte_atomic16_cmpset((volatile uint16_t *)&v->cnt, 0, 1);
436}
437#endif
438
/** Reset the counter to zero (plain volatile store, not an atomic RMW). */
static inline void rte_atomic16_clear(rte_atomic16_t *v)
{
 v->cnt = 0;
}
449
450/*------------------------- 32 bit atomic operations -------------------------*/
451
/**
 * Atomic 32-bit compare-and-set: if *dst equals exp, store src into *dst.
 *
 * @return Non-zero when the swap was performed, 0 otherwise.
 */
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src);
470
471#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
{
	/* Publish src only if *dst still holds exp; report whether it did. */
	const int swapped = __sync_bool_compare_and_swap(dst, exp, src);

	return swapped;
}
477#endif
478
/**
 * Atomically set *dst to val and return the value *dst held beforehand
 * (sequentially consistent).
 */
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint32_t
rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
{
 /* NOTE(review): sibling helpers cast their operand to a
  * (volatile __rte_atomic ...) pointer before calling the
  * rte_atomic_*_explicit wrappers; dst is passed uncast here.  Possibly
  * dropped by the extraction — verify against the upstream header. */
 return rte_atomic_exchange_explicit(dst, val, rte_memory_order_seq_cst);
}
#endif
504
/**
 * 32-bit atomic counter wrapper.
 *
 * NOTE(review): the closing "} rte_atomic32_t;" line of this typedef was
 * lost in extraction; the type name comes from the cross-reference index at
 * the end of this listing.
 */
typedef struct {
 volatile int32_t cnt; /**< Internal counter value. */

/** Static initializer for an rte_atomic32_t. */
#define RTE_ATOMIC32_INIT(val) { (val) }
516
/*
 * NOTE(review): the extraction dropped the signature lines of the next two
 * definitions; per the index they are rte_atomic32_init(rte_atomic32_t *v)
 * and rte_atomic32_read(const rte_atomic32_t *v).
 */

/* Initialize the counter to zero (plain store, not an atomic RMW). */
static inline void
{
 v->cnt = 0;
}

/* Return the current counter value (plain volatile load). */
static inline int32_t
{
 return v->cnt;
}

/** Set the counter to new_value (plain volatile store). */
static inline void
rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
{
 v->cnt = new_value;
}
556
/*
 * NOTE(review): signature lines are missing below; per the index these are
 * rte_atomic32_add(rte_atomic32_t *v, int32_t inc) and
 * rte_atomic32_sub(rte_atomic32_t *v, int32_t dec).
 */

/* Atomically add inc to the counter (seq_cst); the result is discarded. */
static inline void
{
 rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, inc,
 rte_memory_order_seq_cst);
}

/* Atomically subtract dec from the counter (seq_cst); result discarded. */
static inline void
{
 rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, dec,
 rte_memory_order_seq_cst);
}
586
/*
 * NOTE(review): declaration and definition signature lines are missing
 * below; per the index these are rte_atomic32_inc(rte_atomic32_t *v) and
 * rte_atomic32_dec(rte_atomic32_t *v).
 */

/* Atomically increment the counter by one. */
static inline void

#ifdef RTE_FORCE_INTRINSICS
static inline void
{
 rte_atomic32_add(v, 1);
}
#endif

/* Atomically decrement the counter by one. */
static inline void

#ifdef RTE_FORCE_INTRINSICS
static inline void
{
 rte_atomic32_sub(v,1);
}
#endif
620
/*
 * NOTE(review): signature lines are missing below; per the index these are
 * rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc) and
 * rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec).
 */

/* Atomically add inc and return the value AFTER the addition. */
static inline int32_t
{
 return rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, inc,
 rte_memory_order_seq_cst) + inc;
}

/* Atomically subtract dec and return the value AFTER the subtraction. */
static inline int32_t
{
 return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, dec,
 rte_memory_order_seq_cst) - dec;
}
661
674
675#ifdef RTE_FORCE_INTRINSICS
676static inline int rte_atomic32_inc_and_test(rte_atomic32_t *v)
677{
678 return rte_atomic_fetch_add_explicit((volatile __rte_atomic int32_t *)&v->cnt, 1,
679 rte_memory_order_seq_cst) + 1 == 0;
680}
681#endif
682
695
696#ifdef RTE_FORCE_INTRINSICS
697static inline int rte_atomic32_dec_and_test(rte_atomic32_t *v)
698{
699 return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int32_t *)&v->cnt, 1,
700 rte_memory_order_seq_cst) - 1 == 0;
701}
702#endif
703
716
717#ifdef RTE_FORCE_INTRINSICS
718static inline int rte_atomic32_test_and_set(rte_atomic32_t *v)
719{
720 return rte_atomic32_cmpset((volatile uint32_t *)&v->cnt, 0, 1);
721}
722#endif
723
/** Reset the counter to zero (plain volatile store, not an atomic RMW). */
static inline void rte_atomic32_clear(rte_atomic32_t *v)
{
 v->cnt = 0;
}
734
735/*------------------------- 64 bit atomic operations -------------------------*/
736
/**
 * Atomic 64-bit compare-and-set: if *dst equals exp, store src into *dst.
 *
 * @return Non-zero when the swap was performed, 0 otherwise.
 */
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src);
754
755#ifdef RTE_FORCE_INTRINSICS
static inline int
rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
{
	/* Publish src only if *dst still holds exp; report whether it did. */
	const int swapped = __sync_bool_compare_and_swap(dst, exp, src);

	return swapped;
}
761#endif
762
/**
 * Atomically set *dst to val and return the value *dst held beforehand
 * (sequentially consistent).
 */
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val);

#ifdef RTE_FORCE_INTRINSICS
static inline uint64_t
rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
{
 /* NOTE(review): sibling helpers cast their operand to a
  * (volatile __rte_atomic ...) pointer before calling the
  * rte_atomic_*_explicit wrappers; dst is passed uncast here.  Possibly
  * dropped by the extraction — verify against the upstream header. */
 return rte_atomic_exchange_explicit(dst, val, rte_memory_order_seq_cst);
}
#endif
788
/**
 * 64-bit atomic counter wrapper.
 *
 * NOTE(review): the closing "} rte_atomic64_t;" line of this typedef was
 * lost in extraction; the type name comes from the cross-reference index at
 * the end of this listing.
 */
typedef struct {
 volatile int64_t cnt; /**< Internal counter value. */

/** Static initializer for an rte_atomic64_t. */
#define RTE_ATOMIC64_INIT(val) { (val) }
800
/*
 * NOTE(review): declaration and definition signature lines are missing
 * below; per the index this is rte_atomic64_init(rte_atomic64_t *v).
 */

/* Initialize the counter to zero. */
static inline void

#ifdef RTE_FORCE_INTRINSICS
static inline void
{
#ifdef __LP64__
 /* 64-bit targets: plain store. */
 v->cnt = 0;
#else
 /* 32-bit targets: publish the 64-bit zero via a CAS retry loop so the
  * whole value is replaced atomically. */
 int success = 0;
 uint64_t tmp;

 while (success == 0) {
 tmp = v->cnt;
 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
 tmp, 0);
 }
#endif
}
#endif
828
/*
 * NOTE(review): declaration and definition signature lines are missing
 * below; per the index this is rte_atomic64_read(rte_atomic64_t *v).
 */

/* Return the current counter value. */
static inline int64_t

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
{
#ifdef __LP64__
 /* 64-bit targets: plain load. */
 return v->cnt;
#else
 /* 32-bit targets: a CAS that swaps the value with itself yields an
  * atomic snapshot of the full 64 bits. */
 int success = 0;
 uint64_t tmp;

 while (success == 0) {
 tmp = v->cnt;
 /* replace the value by itself */
 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
 tmp, tmp);
 }
 return tmp;
#endif
}
#endif
860
/** Atomically set the 64-bit counter to new_value. */
static inline void
rte_atomic64_set(rte_atomic64_t *v, int64_t new_value);
871
872#ifdef RTE_FORCE_INTRINSICS
873static inline void
874rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
875{
876#ifdef __LP64__
877 v->cnt = new_value;
878#else
879 int success = 0;
880 uint64_t tmp;
881
882 while (success == 0) {
883 tmp = v->cnt;
884 success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,
885 tmp, new_value);
886 }
887#endif
888}
889#endif
890
/*
 * NOTE(review): the declaration's signature line is missing below; per the
 * index it is rte_atomic64_add(rte_atomic64_t *v, int64_t inc).
 */

/* Atomically add inc to the counter (seq_cst); the result is discarded. */
static inline void

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
{
 rte_atomic_fetch_add_explicit((volatile __rte_atomic int64_t *)&v->cnt, inc,
 rte_memory_order_seq_cst);
}
#endif
910
/*
 * NOTE(review): the declaration's signature line is missing below; per the
 * index it is rte_atomic64_sub(rte_atomic64_t *v, int64_t dec).
 */

/* Atomically subtract dec from the counter (seq_cst); result discarded. */
static inline void

#ifdef RTE_FORCE_INTRINSICS
static inline void
rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
{
 rte_atomic_fetch_sub_explicit((volatile __rte_atomic int64_t *)&v->cnt, dec,
 rte_memory_order_seq_cst);
}
#endif
930
/*
 * NOTE(review): declaration and definition signature lines are missing
 * below; per the index these are rte_atomic64_inc(rte_atomic64_t *v) and
 * rte_atomic64_dec(rte_atomic64_t *v).
 */

/* Atomically increment the counter by one. */
static inline void

#ifdef RTE_FORCE_INTRINSICS
static inline void
{
 rte_atomic64_add(v, 1);
}
#endif

/* Atomically decrement the counter by one. */
static inline void

#ifdef RTE_FORCE_INTRINSICS
static inline void
{
 rte_atomic64_sub(v, 1);
}
#endif
964
/*
 * NOTE(review): signature lines are missing below; per the index these are
 * rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc) and
 * rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec).
 */

/* Atomically add inc and return the value AFTER the addition. */
static inline int64_t

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
{
 return rte_atomic_fetch_add_explicit((volatile __rte_atomic int64_t *)&v->cnt, inc,
 rte_memory_order_seq_cst) + inc;
}
#endif

/* Atomically subtract dec and return the value AFTER the subtraction. */
static inline int64_t

#ifdef RTE_FORCE_INTRINSICS
static inline int64_t
{
 return rte_atomic_fetch_sub_explicit((volatile __rte_atomic int64_t *)&v->cnt, dec,
 rte_memory_order_seq_cst) - dec;
}
#endif
1014
1027
1028#ifdef RTE_FORCE_INTRINSICS
1029static inline int rte_atomic64_inc_and_test(rte_atomic64_t *v)
1030{
1031 return rte_atomic64_add_return(v, 1) == 0;
1032}
1033#endif
1034
1047
1048#ifdef RTE_FORCE_INTRINSICS
1049static inline int rte_atomic64_dec_and_test(rte_atomic64_t *v)
1050{
1051 return rte_atomic64_sub_return(v, 1) == 0;
1052}
1053#endif
1054
1067
1068#ifdef RTE_FORCE_INTRINSICS
1069static inline int rte_atomic64_test_and_set(rte_atomic64_t *v)
1070{
1071 return rte_atomic64_cmpset((volatile uint64_t *)&v->cnt, 0, 1);
1072}
1073#endif
1074
/** Reset the counter to zero (delegates to rte_atomic64_set, whose non-LP64
 * path performs the store via an atomic CAS loop). */
static inline void rte_atomic64_clear(rte_atomic64_t *v);

#ifdef RTE_FORCE_INTRINSICS
static inline void rte_atomic64_clear(rte_atomic64_t *v)
{
 rte_atomic64_set(v, 0);
}
#endif
1089
1090#endif
1091
1092/*------------------------ 128 bit atomic operations -------------------------*/
1093
/**
 * 128-bit value, 16-byte aligned so it can be used with 128-bit
 * compare-exchange operations.  Viewable as two 64-bit words, or — on
 * 64-bit non-MSVC builds — as a native __int128.
 */
typedef struct {
 union {
 uint64_t val[2]; /**< The value as two 64-bit halves. */
#ifdef RTE_ARCH_64
#ifndef RTE_TOOLCHAIN_MSVC
 __extension__ __int128 int128; /**< The value as a native 128-bit int. */
#endif
#endif
 };
} __rte_aligned(16) rte_int128_t;
1107
#ifdef __DOXYGEN__

/**
 * 128-bit atomic compare-and-exchange (documentation-only prototype; the
 * implementation is per-architecture).
 *
 * NOTE(review): the extraction dropped the line carrying the function name
 * and first parameter; per the index the full signature is
 * rte_atomic128_cmp_exchange(rte_int128_t *dst, rte_int128_t *exp,
 * const rte_int128_t *src, unsigned int weak, int success, int failure).
 */
static inline int
 rte_int128_t *exp,
 const rte_int128_t *src,
 unsigned int weak,
 int success,
 int failure);

#endif /* __DOXYGEN__ */
1158
1159#endif /* _RTE_ATOMIC_H_ */
[Doxygen cross-reference index follows: symbol signatures and their definition lines within rte_atomic.h.]
static int rte_atomic16_dec_and_test(rte_atomic16_t *v)
static void rte_atomic16_dec(rte_atomic16_t *v)
static int rte_atomic32_cmpset(volatile uint32_t *dst, uint32_t exp, uint32_t src)
static int rte_atomic64_cmpset(volatile uint64_t *dst, uint64_t exp, uint64_t src)
static void rte_atomic64_add(rte_atomic64_t *v, int64_t inc)
static int rte_atomic16_cmpset(volatile uint16_t *dst, uint16_t exp, uint16_t src)
static int rte_atomic128_cmp_exchange(rte_int128_t *dst, rte_int128_t *exp, const rte_int128_t *src, unsigned int weak, int success, int failure)
static int rte_atomic64_test_and_set(rte_atomic64_t *v)
static void rte_io_rmb(void)
static void rte_rmb(void)
static void rte_atomic32_clear(rte_atomic32_t *v)
Definition rte_atomic.h:730
static int64_t rte_atomic64_sub_return(rte_atomic64_t *v, int64_t dec)
static void rte_io_mb(void)
static void rte_io_wmb(void)
static int rte_atomic32_inc_and_test(rte_atomic32_t *v)
static int rte_atomic64_dec_and_test(rte_atomic64_t *v)
static void rte_atomic64_clear(rte_atomic64_t *v)
static void rte_smp_mb(void)
static int16_t rte_atomic16_sub_return(rte_atomic16_t *v, int16_t dec)
Definition rte_atomic.h:371
static void rte_atomic16_set(rte_atomic16_t *v, int16_t new_value)
Definition rte_atomic.h:267
static void rte_atomic16_clear(rte_atomic16_t *v)
Definition rte_atomic.h:445
static void rte_atomic16_add(rte_atomic16_t *v, int16_t inc)
Definition rte_atomic.h:281
static uint32_t rte_atomic32_exchange(volatile uint32_t *dst, uint32_t val)
static void rte_mb(void)
static void rte_atomic32_inc(rte_atomic32_t *v)
static void rte_smp_wmb(void)
static void rte_atomic16_sub(rte_atomic16_t *v, int16_t dec)
Definition rte_atomic.h:296
static int rte_atomic32_test_and_set(rte_atomic32_t *v)
static void rte_atomic32_dec(rte_atomic32_t *v)
static uint16_t rte_atomic16_exchange(volatile uint16_t *dst, uint16_t val)
static uint64_t rte_atomic64_exchange(volatile uint64_t *dst, uint64_t val)
static void rte_atomic_thread_fence(rte_memory_order memorder)
static void rte_atomic64_dec(rte_atomic64_t *v)
static int32_t rte_atomic32_sub_return(rte_atomic32_t *v, int32_t dec)
Definition rte_atomic.h:656
static void rte_atomic16_init(rte_atomic16_t *v)
Definition rte_atomic.h:239
static void rte_smp_rmb(void)
static void rte_atomic64_sub(rte_atomic64_t *v, int64_t dec)
static int rte_atomic16_test_and_set(rte_atomic16_t *v)
static int32_t rte_atomic32_read(const rte_atomic32_t *v)
Definition rte_atomic.h:538
static void rte_wmb(void)
static void rte_atomic32_add(rte_atomic32_t *v, int32_t inc)
Definition rte_atomic.h:566
static void rte_atomic32_set(rte_atomic32_t *v, int32_t new_value)
Definition rte_atomic.h:552
static void rte_atomic64_set(rte_atomic64_t *v, int64_t new_value)
static void rte_atomic32_sub(rte_atomic32_t *v, int32_t dec)
Definition rte_atomic.h:581
static void rte_atomic64_inc(rte_atomic64_t *v)
static int16_t rte_atomic16_read(const rte_atomic16_t *v)
Definition rte_atomic.h:253
static int64_t rte_atomic64_add_return(rte_atomic64_t *v, int64_t inc)
static void rte_atomic64_init(rte_atomic64_t *v)
static int16_t rte_atomic16_add_return(rte_atomic16_t *v, int16_t inc)
Definition rte_atomic.h:350
static void rte_atomic32_init(rte_atomic32_t *v)
Definition rte_atomic.h:524
static void rte_atomic16_inc(rte_atomic16_t *v)
static int64_t rte_atomic64_read(rte_atomic64_t *v)
static int rte_atomic64_inc_and_test(rte_atomic64_t *v)
static int rte_atomic32_dec_and_test(rte_atomic32_t *v)
static int rte_atomic16_inc_and_test(rte_atomic16_t *v)
static int32_t rte_atomic32_add_return(rte_atomic32_t *v, int32_t inc)
Definition rte_atomic.h:635
volatile int16_t cnt
Definition rte_atomic.h:224
volatile int32_t cnt
Definition rte_atomic.h:509
volatile int64_t cnt
Definition rte_atomic.h:793