1 /* Process the definitions file with autogen to produce upc_atomic.upc:
3 autogen -L .. upc_atomic.def
6 Free Software Foundation, Inc.
7 This file is part of the UPC runtime Library.
8 Written by Gary Funck <gary@intrepid.com>
9 and Nenad Vukicevic <nenad@intrepid.com>
11 This file is part of GCC.
13 GCC is free software; you can redistribute it and/or modify
14 it under the terms of the GNU General Public License as published by
15 the Free Software Foundation; either version 3, or (at your option)
18 GCC is distributed in the hope that it will be useful,
19 but WITHOUT ANY WARRANTY; without even the implied warranty of
20 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 GNU General Public License for more details.
23 Under Section 7 of GPL version 3, you are granted additional
24 permissions described in the GCC Runtime Library Exception, version
25 3.1, as published by the Free Software Foundation.
27 You should have received a copy of the GNU General Public License and
28 a copy of the GCC Runtime Library Exception along with this program;
29 see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
30 <http://www.gnu.org/licenses/>. */
38 #include <upc_atomic.h>
39 #include "upc_config.h"
42 * @file __upc_atomic.upc
43 * GUPC Portals4 UPC atomics implementation.
47 * @addtogroup ATOMIC GUPCR Atomics Functions
51 /** Atomic domain representation */
52 struct upc_atomicdomain_struct
/* Short typedef aliases for every UPC atomic target type; the autogen
   template instantiates one dispatch routine per alias below.
   NOTE(review): this listing appears decimated -- some aliases (e.g. the
   'I_type'/'F_type' typedef lines used later) are not visible here.  */
58 /* Represent a bit-encoded operation as an integer. */
59 typedef unsigned int upc_op_num_t;
63 typedef unsigned int UI_type;
65 typedef unsigned long UL_type;
66 typedef long long LL_type;
67 typedef unsigned long long ULL_type;
68 typedef int32_t I32_type;
69 typedef uint32_t UI32_type;
70 typedef int64_t I64_type;
71 typedef uint64_t UI64_type;
73 typedef double D_type;
74 typedef shared void * PTS_type;
/* Operation-class masks: each UPC_* op is a distinct bit, so a class is
   the OR of its member bits.  Used to validate ops per target type.  */
77 #define ATOMIC_ACCESS_OPS (UPC_GET | UPC_SET | UPC_CSWAP)
79 #define ATOMIC_NUM_OPS (UPC_ADD | UPC_MULT | UPC_MIN | UPC_MAX | UPC_SUB | UPC_INC | UPC_DEC)
81 #define ATOMIC_BIT_OPS (UPC_AND | UPC_OR | UPC_XOR)
82 #define ATOMIC_ALL_OPS (ATOMIC_ACCESS_OPS | ATOMIC_NUM_OPS \
86 * Check if OP is a valid atomic operation type.
88 * @param [in] op UPC atomic operation
89 * @retval TRUE if op is a valid atomic operation
/* An op is valid iff exactly one bit is set and that bit is one of the
   known atomic operations.  Since ~(-op) == op - 1 in two's complement,
   'op & ~(-op)' is the classic power-of-two test (non-zero when more
   than one bit is set).  */
92 __upc_atomic_is_valid_op (upc_op_t op)
94 return !((op & ~(-op)) || (op & ~ATOMIC_ALL_OPS));
98 * Convert the bit-encoded OP into an integer.
100 * @param [in] op UPC atomic operation
101 * @retval op represented as integer index
102 * (UPC_ADD_OP, UPC_MULT_OP ...)
/* Map a single-bit op code to its bit index (UPC_ADD_OP, ...) by
   counting leading zeros; assumes 'op' already passed
   __upc_atomic_is_valid_op, i.e. exactly one bit is set.  */
104 static inline upc_op_num_t
105 __upc_atomic_op_num (upc_op_t op)
107 return (LONG_LONG_BITS - 1) - __builtin_clzll ((long long) op);
111 * Check if UPC_TYPE is a valid atomic operation type.
113 * @param [in] upc_type UPC atomic type
114 * @retval TRUE if atomic operations are supported on UPC_TYPE
117 __upc_atomic_is_valid_type (upc_type_t upc_type)
141 * Return the atomic operations supported for type UPC_TYPE.
143 * @param [in] upc_type UPC atomic type
144 * @retval bit vector of supported atomic operations.
/* NOTE(review): decimated listing -- the switch and its case labels are
   missing here.  From the visible returns: the integer types support
   access + numeric + bitwise ops; the two floating types support
   access + numeric only; the final (presumably PTS) type supports
   access ops only -- confirm against the generated file.  */
147 __upc_atomic_supported_ops (upc_type_t upc_type)
152 return ATOMIC_ACCESS_OPS | ATOMIC_NUM_OPS | ATOMIC_BIT_OPS;
154 return ATOMIC_ACCESS_OPS | ATOMIC_NUM_OPS | ATOMIC_BIT_OPS;
156 return ATOMIC_ACCESS_OPS | ATOMIC_NUM_OPS | ATOMIC_BIT_OPS;
158 return ATOMIC_ACCESS_OPS | ATOMIC_NUM_OPS | ATOMIC_BIT_OPS;
160 return ATOMIC_ACCESS_OPS | ATOMIC_NUM_OPS | ATOMIC_BIT_OPS;
162 return ATOMIC_ACCESS_OPS | ATOMIC_NUM_OPS | ATOMIC_BIT_OPS;
164 return ATOMIC_ACCESS_OPS | ATOMIC_NUM_OPS | ATOMIC_BIT_OPS;
166 return ATOMIC_ACCESS_OPS | ATOMIC_NUM_OPS | ATOMIC_BIT_OPS;
168 return ATOMIC_ACCESS_OPS | ATOMIC_NUM_OPS | ATOMIC_BIT_OPS;
170 return ATOMIC_ACCESS_OPS | ATOMIC_NUM_OPS | ATOMIC_BIT_OPS;
172 return ATOMIC_ACCESS_OPS | ATOMIC_NUM_OPS;
174 return ATOMIC_ACCESS_OPS | ATOMIC_NUM_OPS;
176 return ATOMIC_ACCESS_OPS;
182 * Convert UPC atomic operation into a string.
184 * @param [in] upc_op UPC atomic operation
185 * @retval Character string
188 __upc_atomic_op_name (upc_op_num_t op_num)
223 * Convert UPC atomic type into a string.
225 * @param [in] upc_type UPC atomic type
226 * @retval Character string
229 __upc_atomic_type_name (upc_type_t upc_type)
/* Per-operation operand requirements, one flag bit each:
   REQ_*  -> pointer must be non-NULL; NULL_* -> pointer must be NULL.
   NOTE: '0b' binary literals are a GNU extension (standard in C23).  */
263 #define REQ_FETCH_PTR 0b00000001
264 #define REQ_OPERAND1 0b00000010
265 #define REQ_OPERAND2 0b00000100
266 #define NULL_OPERAND1 0b00001000
267 #define NULL_OPERAND2 0b00010000
/* Requirement mask per operation, indexed by upc_op_num_t (the bit
   index produced by __upc_atomic_op_num).  Binary ops take operand1,
   CSWAP takes both, GET takes neither but requires a fetch pointer,
   INC/DEC take no operands at all.  */
269 static const unsigned int operand_check[] =
271 /* UPC_ADD_OP */ REQ_OPERAND1 | NULL_OPERAND2,
272 /* UPC_MULT_OP */ REQ_OPERAND1 | NULL_OPERAND2,
273 /* UPC_AND_OP */ REQ_OPERAND1 | NULL_OPERAND2,
274 /* UPC_OR_OP */ REQ_OPERAND1 | NULL_OPERAND2,
275 /* UPC_XOR_OP */ REQ_OPERAND1 | NULL_OPERAND2,
276 /* UPC_LOGAND_OP */ 0,
277 /* UPC_LOGOR_OP */ 0,
278 /* UPC_MIN_OP */ REQ_OPERAND1 | NULL_OPERAND2,
279 /* UPC_MAX_OP */ REQ_OPERAND1 | NULL_OPERAND2,
280 /* UPC_GET_OP */ REQ_FETCH_PTR | NULL_OPERAND1 | NULL_OPERAND2,
281 /* UPC_SET_OP */ REQ_OPERAND1 | NULL_OPERAND2,
282 /* UPC_CSWAP_OP */ REQ_OPERAND1 | REQ_OPERAND2,
283 /* UPC_SUB_OP */ REQ_OPERAND1 | NULL_OPERAND2,
284 /* UPC_INC_OP */ NULL_OPERAND1 | NULL_OPERAND2,
285 /* UPC_DEC_OP */ NULL_OPERAND1 | NULL_OPERAND2,
/* Validate the caller-supplied pointers against operand_check[op_num];
   any violation is fatal (aborts via __upc_fatal) per the UPC atomics
   contract.  Does not return an error code.  */
289 __upc_atomic_check_operands (upc_op_num_t op_num,
290 void * restrict fetch_ptr,
291 const void * restrict operand1,
292 const void * restrict operand2)
294 const unsigned int check = operand_check[op_num];
295 if ((check & REQ_FETCH_PTR) && fetch_ptr == NULL)
296 __upc_fatal ("atomic operation `%s' "
297 "requires a non-NULL fetch pointer",
298 __upc_atomic_op_name (op_num));
299 if ((check & REQ_OPERAND1) && operand1 == NULL)
300 __upc_fatal ("atomic operation `%s' "
301 "requires a non-NULL operand1 pointer",
302 __upc_atomic_op_name (op_num))
303 if ((check & REQ_OPERAND2) && operand2 == NULL)
304 __upc_fatal ("atomic operation `%s' "
305 "requires a non-NULL operand2 pointer",
306 __upc_atomic_op_name (op_num));
307 if ((check & NULL_OPERAND1) && operand1 != NULL)
308 __upc_fatal ("atomic operation `%s' "
309 "requires a NULL operand1 pointer",
310 __upc_atomic_op_name (op_num));
311 if ((check & NULL_OPERAND2) && operand2 != NULL)
312 __upc_fatal ("atomic operation `%s' "
313 "requires a NULL operand2 pointer",
314 __upc_atomic_op_name (op_num));
/* Atomic operation on an 'I_type' (int) target.
   NOTE(review): decimated listing -- function name, op_num switch and
   case labels/braces are not visible; code lines kept byte-for-byte.
   Pattern: fetch-and-op builtins where GCC provides them; CAS retry
   loops for MULT/MIN/MAX/CSWAP; SEQ_CST ordering throughout.  */
319 I_type * restrict fetch_ptr,
321 shared I_type * restrict target,
322 I_type * restrict operand1 __attribute__((unused)),
323 I_type * restrict operand2 __attribute__((unused)))
325 I_type orig_value __attribute__((unused));
326 I_type new_value __attribute__((unused));
/* Convert the UPC shared pointer to a local address.  */
327 I_type *target_ptr = __cvtaddr (*(upc_shared_ptr_t *)&target);
/* ADD: direct fetch-and-add builtin.  */
331 orig_value = __atomic_fetch_add (target_ptr, *operand1,
/* MULT: no fetch builtin exists, so CAS retry loop.  */
337 orig_value = *target_ptr;
338 new_value = orig_value * *operand1;
340 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
342 /* success_memmodel */ __ATOMIC_SEQ_CST,
343 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* AND / OR / XOR: direct fetch builtins.  */
346 orig_value = __atomic_fetch_and (target_ptr, *operand1,
350 orig_value = __atomic_fetch_or (target_ptr, *operand1,
354 orig_value = __atomic_fetch_xor (target_ptr, *operand1,
/* MIN: CAS retry loop.  */
360 orig_value = *target_ptr;
361 new_value = (*operand1 < orig_value) ? *operand1 : orig_value;
363 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
365 /* success_memmodel */ __ATOMIC_SEQ_CST,
366 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* MAX: CAS retry loop.  */
371 orig_value = *target_ptr;
372 new_value = (*operand1 > orig_value) ? *operand1 : orig_value;
374 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
376 /* success_memmodel */ __ATOMIC_SEQ_CST,
377 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* GET: atomic load into orig_value.  */
380 __atomic_load (target_ptr, &orig_value, __ATOMIC_SEQ_CST);
/* SET: plain store when no fetch requested, else exchange.  */
383 if (fetch_ptr == NULL)
384 __atomic_store (target_ptr, operand1, __ATOMIC_SEQ_CST);
386 __atomic_exchange (target_ptr, operand1, &orig_value,
387 /* memmodel */ __ATOMIC_SEQ_CST);
/* CSWAP: compare-and-swap retry loop.  */
392 orig_value = *target_ptr;
393 new_value = (orig_value == *operand1) ? *operand2 : orig_value;
395 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
397 /* success_memmodel */ __ATOMIC_SEQ_CST,
398 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* SUB / INC / DEC: fetch builtins (INC/DEC add/sub constant 1).  */
401 orig_value = __atomic_fetch_sub (target_ptr, *operand1,
405 orig_value = __atomic_fetch_add (target_ptr, (int) 1,
409 orig_value = __atomic_fetch_sub (target_ptr, (int) 1,
/* Return the pre-operation value when the caller asked for it.  */
414 if (fetch_ptr != NULL)
415 *fetch_ptr = orig_value;
/* Atomic operation on a 'UI_type' (unsigned int) target; same
   structure as the I_type variant (decimated listing -- switch and
   case labels not visible).  */
420 UI_type * restrict fetch_ptr,
422 shared UI_type * restrict target,
423 UI_type * restrict operand1 __attribute__((unused)),
424 UI_type * restrict operand2 __attribute__((unused)))
426 UI_type orig_value __attribute__((unused));
427 UI_type new_value __attribute__((unused));
428 UI_type *target_ptr = __cvtaddr (*(upc_shared_ptr_t *)&target);
/* ADD via fetch builtin; MULT via CAS retry loop.  */
432 orig_value = __atomic_fetch_add (target_ptr, *operand1,
438 orig_value = *target_ptr;
439 new_value = orig_value * *operand1;
441 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
443 /* success_memmodel */ __ATOMIC_SEQ_CST,
444 /* failure_memmodel */ __ATOMIC_SEQ_CST));
447 orig_value = __atomic_fetch_and (target_ptr, *operand1,
451 orig_value = __atomic_fetch_or (target_ptr, *operand1,
455 orig_value = __atomic_fetch_xor (target_ptr, *operand1,
/* MIN / MAX via CAS retry loops.  */
461 orig_value = *target_ptr;
462 new_value = (*operand1 < orig_value) ? *operand1 : orig_value;
464 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
466 /* success_memmodel */ __ATOMIC_SEQ_CST,
467 /* failure_memmodel */ __ATOMIC_SEQ_CST));
472 orig_value = *target_ptr;
473 new_value = (*operand1 > orig_value) ? *operand1 : orig_value;
475 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
477 /* success_memmodel */ __ATOMIC_SEQ_CST,
478 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* GET / SET / CSWAP.  */
481 __atomic_load (target_ptr, &orig_value, __ATOMIC_SEQ_CST);
484 if (fetch_ptr == NULL)
485 __atomic_store (target_ptr, operand1, __ATOMIC_SEQ_CST);
487 __atomic_exchange (target_ptr, operand1, &orig_value,
488 /* memmodel */ __ATOMIC_SEQ_CST);
493 orig_value = *target_ptr;
494 new_value = (orig_value == *operand1) ? *operand2 : orig_value;
496 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
498 /* success_memmodel */ __ATOMIC_SEQ_CST,
499 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* SUB / INC / DEC.  */
502 orig_value = __atomic_fetch_sub (target_ptr, *operand1,
506 orig_value = __atomic_fetch_add (target_ptr, (unsigned int) 1,
510 orig_value = __atomic_fetch_sub (target_ptr, (unsigned int) 1,
515 if (fetch_ptr != NULL)
516 *fetch_ptr = orig_value;
/* Atomic operation on an 'L_type' (long) target; same structure as
   the I_type variant (decimated listing -- switch/case not visible).  */
521 L_type * restrict fetch_ptr,
523 shared L_type * restrict target,
524 L_type * restrict operand1 __attribute__((unused)),
525 L_type * restrict operand2 __attribute__((unused)))
527 L_type orig_value __attribute__((unused));
528 L_type new_value __attribute__((unused));
529 L_type *target_ptr = __cvtaddr (*(upc_shared_ptr_t *)&target);
/* ADD via fetch builtin; MULT via CAS retry loop.  */
533 orig_value = __atomic_fetch_add (target_ptr, *operand1,
539 orig_value = *target_ptr;
540 new_value = orig_value * *operand1;
542 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
544 /* success_memmodel */ __ATOMIC_SEQ_CST,
545 /* failure_memmodel */ __ATOMIC_SEQ_CST));
548 orig_value = __atomic_fetch_and (target_ptr, *operand1,
552 orig_value = __atomic_fetch_or (target_ptr, *operand1,
556 orig_value = __atomic_fetch_xor (target_ptr, *operand1,
/* MIN / MAX via CAS retry loops.  */
562 orig_value = *target_ptr;
563 new_value = (*operand1 < orig_value) ? *operand1 : orig_value;
565 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
567 /* success_memmodel */ __ATOMIC_SEQ_CST,
568 /* failure_memmodel */ __ATOMIC_SEQ_CST));
573 orig_value = *target_ptr;
574 new_value = (*operand1 > orig_value) ? *operand1 : orig_value;
576 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
578 /* success_memmodel */ __ATOMIC_SEQ_CST,
579 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* GET / SET / CSWAP.  */
582 __atomic_load (target_ptr, &orig_value, __ATOMIC_SEQ_CST);
585 if (fetch_ptr == NULL)
586 __atomic_store (target_ptr, operand1, __ATOMIC_SEQ_CST);
588 __atomic_exchange (target_ptr, operand1, &orig_value,
589 /* memmodel */ __ATOMIC_SEQ_CST);
594 orig_value = *target_ptr;
595 new_value = (orig_value == *operand1) ? *operand2 : orig_value;
597 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
599 /* success_memmodel */ __ATOMIC_SEQ_CST,
600 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* SUB / INC / DEC.  */
603 orig_value = __atomic_fetch_sub (target_ptr, *operand1,
607 orig_value = __atomic_fetch_add (target_ptr, (long) 1,
611 orig_value = __atomic_fetch_sub (target_ptr, (long) 1,
616 if (fetch_ptr != NULL)
617 *fetch_ptr = orig_value;
/* Atomic operation on a 'UL_type' (unsigned long) target; same
   structure as the I_type variant (decimated listing).  */
622 UL_type * restrict fetch_ptr,
624 shared UL_type * restrict target,
625 UL_type * restrict operand1 __attribute__((unused)),
626 UL_type * restrict operand2 __attribute__((unused)))
628 UL_type orig_value __attribute__((unused));
629 UL_type new_value __attribute__((unused));
630 UL_type *target_ptr = __cvtaddr (*(upc_shared_ptr_t *)&target);
/* ADD via fetch builtin; MULT via CAS retry loop.  */
634 orig_value = __atomic_fetch_add (target_ptr, *operand1,
640 orig_value = *target_ptr;
641 new_value = orig_value * *operand1;
643 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
645 /* success_memmodel */ __ATOMIC_SEQ_CST,
646 /* failure_memmodel */ __ATOMIC_SEQ_CST));
649 orig_value = __atomic_fetch_and (target_ptr, *operand1,
653 orig_value = __atomic_fetch_or (target_ptr, *operand1,
657 orig_value = __atomic_fetch_xor (target_ptr, *operand1,
/* MIN / MAX via CAS retry loops.  */
663 orig_value = *target_ptr;
664 new_value = (*operand1 < orig_value) ? *operand1 : orig_value;
666 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
668 /* success_memmodel */ __ATOMIC_SEQ_CST,
669 /* failure_memmodel */ __ATOMIC_SEQ_CST));
674 orig_value = *target_ptr;
675 new_value = (*operand1 > orig_value) ? *operand1 : orig_value;
677 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
679 /* success_memmodel */ __ATOMIC_SEQ_CST,
680 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* GET / SET / CSWAP.  */
683 __atomic_load (target_ptr, &orig_value, __ATOMIC_SEQ_CST);
686 if (fetch_ptr == NULL)
687 __atomic_store (target_ptr, operand1, __ATOMIC_SEQ_CST);
689 __atomic_exchange (target_ptr, operand1, &orig_value,
690 /* memmodel */ __ATOMIC_SEQ_CST);
695 orig_value = *target_ptr;
696 new_value = (orig_value == *operand1) ? *operand2 : orig_value;
698 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
700 /* success_memmodel */ __ATOMIC_SEQ_CST,
701 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* SUB / INC / DEC.  */
704 orig_value = __atomic_fetch_sub (target_ptr, *operand1,
708 orig_value = __atomic_fetch_add (target_ptr, (unsigned long) 1,
712 orig_value = __atomic_fetch_sub (target_ptr, (unsigned long) 1,
717 if (fetch_ptr != NULL)
718 *fetch_ptr = orig_value;
/* Atomic operation on an 'LL_type' (long long) target; same
   structure as the I_type variant (decimated listing).  */
723 LL_type * restrict fetch_ptr,
725 shared LL_type * restrict target,
726 LL_type * restrict operand1 __attribute__((unused)),
727 LL_type * restrict operand2 __attribute__((unused)))
729 LL_type orig_value __attribute__((unused));
730 LL_type new_value __attribute__((unused));
731 LL_type *target_ptr = __cvtaddr (*(upc_shared_ptr_t *)&target);
/* ADD via fetch builtin; MULT via CAS retry loop.  */
735 orig_value = __atomic_fetch_add (target_ptr, *operand1,
741 orig_value = *target_ptr;
742 new_value = orig_value * *operand1;
744 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
746 /* success_memmodel */ __ATOMIC_SEQ_CST,
747 /* failure_memmodel */ __ATOMIC_SEQ_CST));
750 orig_value = __atomic_fetch_and (target_ptr, *operand1,
754 orig_value = __atomic_fetch_or (target_ptr, *operand1,
758 orig_value = __atomic_fetch_xor (target_ptr, *operand1,
/* MIN / MAX via CAS retry loops.  */
764 orig_value = *target_ptr;
765 new_value = (*operand1 < orig_value) ? *operand1 : orig_value;
767 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
769 /* success_memmodel */ __ATOMIC_SEQ_CST,
770 /* failure_memmodel */ __ATOMIC_SEQ_CST));
775 orig_value = *target_ptr;
776 new_value = (*operand1 > orig_value) ? *operand1 : orig_value;
778 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
780 /* success_memmodel */ __ATOMIC_SEQ_CST,
781 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* GET / SET / CSWAP.  */
784 __atomic_load (target_ptr, &orig_value, __ATOMIC_SEQ_CST);
787 if (fetch_ptr == NULL)
788 __atomic_store (target_ptr, operand1, __ATOMIC_SEQ_CST);
790 __atomic_exchange (target_ptr, operand1, &orig_value,
791 /* memmodel */ __ATOMIC_SEQ_CST);
796 orig_value = *target_ptr;
797 new_value = (orig_value == *operand1) ? *operand2 : orig_value;
799 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
801 /* success_memmodel */ __ATOMIC_SEQ_CST,
802 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* SUB / INC / DEC.  */
805 orig_value = __atomic_fetch_sub (target_ptr, *operand1,
809 orig_value = __atomic_fetch_add (target_ptr, (long long) 1,
813 orig_value = __atomic_fetch_sub (target_ptr, (long long) 1,
818 if (fetch_ptr != NULL)
819 *fetch_ptr = orig_value;
/* Atomic operation on a 'ULL_type' (unsigned long long) target; same
   structure as the I_type variant (decimated listing).  */
824 ULL_type * restrict fetch_ptr,
826 shared ULL_type * restrict target,
827 ULL_type * restrict operand1 __attribute__((unused)),
828 ULL_type * restrict operand2 __attribute__((unused)))
830 ULL_type orig_value __attribute__((unused));
831 ULL_type new_value __attribute__((unused));
832 ULL_type *target_ptr = __cvtaddr (*(upc_shared_ptr_t *)&target);
/* ADD via fetch builtin; MULT via CAS retry loop.  */
836 orig_value = __atomic_fetch_add (target_ptr, *operand1,
842 orig_value = *target_ptr;
843 new_value = orig_value * *operand1;
845 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
847 /* success_memmodel */ __ATOMIC_SEQ_CST,
848 /* failure_memmodel */ __ATOMIC_SEQ_CST));
851 orig_value = __atomic_fetch_and (target_ptr, *operand1,
855 orig_value = __atomic_fetch_or (target_ptr, *operand1,
859 orig_value = __atomic_fetch_xor (target_ptr, *operand1,
/* MIN / MAX via CAS retry loops.  */
865 orig_value = *target_ptr;
866 new_value = (*operand1 < orig_value) ? *operand1 : orig_value;
868 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
870 /* success_memmodel */ __ATOMIC_SEQ_CST,
871 /* failure_memmodel */ __ATOMIC_SEQ_CST));
876 orig_value = *target_ptr;
877 new_value = (*operand1 > orig_value) ? *operand1 : orig_value;
879 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
881 /* success_memmodel */ __ATOMIC_SEQ_CST,
882 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* GET / SET / CSWAP.  */
885 __atomic_load (target_ptr, &orig_value, __ATOMIC_SEQ_CST);
888 if (fetch_ptr == NULL)
889 __atomic_store (target_ptr, operand1, __ATOMIC_SEQ_CST);
891 __atomic_exchange (target_ptr, operand1, &orig_value,
892 /* memmodel */ __ATOMIC_SEQ_CST);
897 orig_value = *target_ptr;
898 new_value = (orig_value == *operand1) ? *operand2 : orig_value;
900 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
902 /* success_memmodel */ __ATOMIC_SEQ_CST,
903 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* SUB / INC / DEC.  */
906 orig_value = __atomic_fetch_sub (target_ptr, *operand1,
910 orig_value = __atomic_fetch_add (target_ptr, (unsigned long long) 1,
914 orig_value = __atomic_fetch_sub (target_ptr, (unsigned long long) 1,
919 if (fetch_ptr != NULL)
920 *fetch_ptr = orig_value;
/* Atomic operation on an 'I32_type' (int32_t) target; same structure
   as the I_type variant (decimated listing).  */
925 I32_type * restrict fetch_ptr,
927 shared I32_type * restrict target,
928 I32_type * restrict operand1 __attribute__((unused)),
929 I32_type * restrict operand2 __attribute__((unused)))
931 I32_type orig_value __attribute__((unused));
932 I32_type new_value __attribute__((unused));
933 I32_type *target_ptr = __cvtaddr (*(upc_shared_ptr_t *)&target);
/* ADD via fetch builtin; MULT via CAS retry loop.  */
937 orig_value = __atomic_fetch_add (target_ptr, *operand1,
943 orig_value = *target_ptr;
944 new_value = orig_value * *operand1;
946 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
948 /* success_memmodel */ __ATOMIC_SEQ_CST,
949 /* failure_memmodel */ __ATOMIC_SEQ_CST));
952 orig_value = __atomic_fetch_and (target_ptr, *operand1,
956 orig_value = __atomic_fetch_or (target_ptr, *operand1,
960 orig_value = __atomic_fetch_xor (target_ptr, *operand1,
/* MIN / MAX via CAS retry loops.  */
966 orig_value = *target_ptr;
967 new_value = (*operand1 < orig_value) ? *operand1 : orig_value;
969 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
971 /* success_memmodel */ __ATOMIC_SEQ_CST,
972 /* failure_memmodel */ __ATOMIC_SEQ_CST));
977 orig_value = *target_ptr;
978 new_value = (*operand1 > orig_value) ? *operand1 : orig_value;
980 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
982 /* success_memmodel */ __ATOMIC_SEQ_CST,
983 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* GET / SET / CSWAP.  */
986 __atomic_load (target_ptr, &orig_value, __ATOMIC_SEQ_CST);
989 if (fetch_ptr == NULL)
990 __atomic_store (target_ptr, operand1, __ATOMIC_SEQ_CST);
992 __atomic_exchange (target_ptr, operand1, &orig_value,
993 /* memmodel */ __ATOMIC_SEQ_CST);
998 orig_value = *target_ptr;
999 new_value = (orig_value == *operand1) ? *operand2 : orig_value;
1001 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1003 /* success_memmodel */ __ATOMIC_SEQ_CST,
1004 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* SUB / INC / DEC.  */
1007 orig_value = __atomic_fetch_sub (target_ptr, *operand1,
1011 orig_value = __atomic_fetch_add (target_ptr, (int32_t) 1,
1015 orig_value = __atomic_fetch_sub (target_ptr, (int32_t) 1,
1020 if (fetch_ptr != NULL)
1021 *fetch_ptr = orig_value;
/* Atomic operation on a 'UI32_type' (uint32_t) target, dispatching on
   op_num; same structure as the I_type variant (decimated listing --
   case labels not visible).  */
1026 UI32_type * restrict fetch_ptr,
1027 upc_op_num_t op_num,
1028 shared UI32_type * restrict target,
1029 UI32_type * restrict operand1 __attribute__((unused)),
1030 UI32_type * restrict operand2 __attribute__((unused)))
1032 UI32_type orig_value __attribute__((unused));
1033 UI32_type new_value __attribute__((unused));
1034 UI32_type *target_ptr = __cvtaddr (*(upc_shared_ptr_t *)&target);
/* ADD via fetch builtin; MULT via CAS retry loop.  */
1038 orig_value = __atomic_fetch_add (target_ptr, *operand1,
1044 orig_value = *target_ptr;
1045 new_value = orig_value * *operand1;
1047 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1049 /* success_memmodel */ __ATOMIC_SEQ_CST,
1050 /* failure_memmodel */ __ATOMIC_SEQ_CST));
1053 orig_value = __atomic_fetch_and (target_ptr, *operand1,
1057 orig_value = __atomic_fetch_or (target_ptr, *operand1,
1061 orig_value = __atomic_fetch_xor (target_ptr, *operand1,
/* MIN / MAX via CAS retry loops.  */
1067 orig_value = *target_ptr;
1068 new_value = (*operand1 < orig_value) ? *operand1 : orig_value;
1070 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1072 /* success_memmodel */ __ATOMIC_SEQ_CST,
1073 /* failure_memmodel */ __ATOMIC_SEQ_CST));
1078 orig_value = *target_ptr;
1079 new_value = (*operand1 > orig_value) ? *operand1 : orig_value;
1081 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1083 /* success_memmodel */ __ATOMIC_SEQ_CST,
1084 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* GET / SET / CSWAP.  */
1087 __atomic_load (target_ptr, &orig_value, __ATOMIC_SEQ_CST);
1090 if (fetch_ptr == NULL)
1091 __atomic_store (target_ptr, operand1, __ATOMIC_SEQ_CST);
1093 __atomic_exchange (target_ptr, operand1, &orig_value,
1094 /* memmodel */ __ATOMIC_SEQ_CST);
1099 orig_value = *target_ptr;
1100 new_value = (orig_value == *operand1) ? *operand2 : orig_value;
1102 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1104 /* success_memmodel */ __ATOMIC_SEQ_CST,
1105 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* SUB / INC / DEC.  */
1108 orig_value = __atomic_fetch_sub (target_ptr, *operand1,
1112 orig_value = __atomic_fetch_add (target_ptr, (uint32_t) 1,
1116 orig_value = __atomic_fetch_sub (target_ptr, (uint32_t) 1,
1121 if (fetch_ptr != NULL)
1122 *fetch_ptr = orig_value;
/* Atomic operation on an 'I64_type' (int64_t) target, dispatching on
   op_num; same structure as the I_type variant (decimated listing).  */
1127 I64_type * restrict fetch_ptr,
1128 upc_op_num_t op_num,
1129 shared I64_type * restrict target,
1130 I64_type * restrict operand1 __attribute__((unused)),
1131 I64_type * restrict operand2 __attribute__((unused)))
1133 I64_type orig_value __attribute__((unused));
1134 I64_type new_value __attribute__((unused));
1135 I64_type *target_ptr = __cvtaddr (*(upc_shared_ptr_t *)&target);
/* ADD via fetch builtin; MULT via CAS retry loop.  */
1139 orig_value = __atomic_fetch_add (target_ptr, *operand1,
1145 orig_value = *target_ptr;
1146 new_value = orig_value * *operand1;
1148 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1150 /* success_memmodel */ __ATOMIC_SEQ_CST,
1151 /* failure_memmodel */ __ATOMIC_SEQ_CST));
1154 orig_value = __atomic_fetch_and (target_ptr, *operand1,
1158 orig_value = __atomic_fetch_or (target_ptr, *operand1,
1162 orig_value = __atomic_fetch_xor (target_ptr, *operand1,
/* MIN / MAX via CAS retry loops.  */
1168 orig_value = *target_ptr;
1169 new_value = (*operand1 < orig_value) ? *operand1 : orig_value;
1171 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1173 /* success_memmodel */ __ATOMIC_SEQ_CST,
1174 /* failure_memmodel */ __ATOMIC_SEQ_CST));
1179 orig_value = *target_ptr;
1180 new_value = (*operand1 > orig_value) ? *operand1 : orig_value;
1182 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1184 /* success_memmodel */ __ATOMIC_SEQ_CST,
1185 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* GET / SET / CSWAP.  */
1188 __atomic_load (target_ptr, &orig_value, __ATOMIC_SEQ_CST);
1191 if (fetch_ptr == NULL)
1192 __atomic_store (target_ptr, operand1, __ATOMIC_SEQ_CST);
1194 __atomic_exchange (target_ptr, operand1, &orig_value,
1195 /* memmodel */ __ATOMIC_SEQ_CST);
1200 orig_value = *target_ptr;
1201 new_value = (orig_value == *operand1) ? *operand2 : orig_value;
1203 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1205 /* success_memmodel */ __ATOMIC_SEQ_CST,
1206 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* SUB / INC / DEC.  */
1209 orig_value = __atomic_fetch_sub (target_ptr, *operand1,
1213 orig_value = __atomic_fetch_add (target_ptr, (int64_t) 1,
1217 orig_value = __atomic_fetch_sub (target_ptr, (int64_t) 1,
1222 if (fetch_ptr != NULL)
1223 *fetch_ptr = orig_value;
/* Atomic operation on a 'UI64_type' (uint64_t) target, dispatching on
   op_num; same structure as the I_type variant (decimated listing).  */
1228 UI64_type * restrict fetch_ptr,
1229 upc_op_num_t op_num,
1230 shared UI64_type * restrict target,
1231 UI64_type * restrict operand1 __attribute__((unused)),
1232 UI64_type * restrict operand2 __attribute__((unused)))
1234 UI64_type orig_value __attribute__((unused));
1235 UI64_type new_value __attribute__((unused));
1236 UI64_type *target_ptr = __cvtaddr (*(upc_shared_ptr_t *)&target);
/* ADD via fetch builtin; MULT via CAS retry loop.  */
1240 orig_value = __atomic_fetch_add (target_ptr, *operand1,
1246 orig_value = *target_ptr;
1247 new_value = orig_value * *operand1;
1249 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1251 /* success_memmodel */ __ATOMIC_SEQ_CST,
1252 /* failure_memmodel */ __ATOMIC_SEQ_CST));
1255 orig_value = __atomic_fetch_and (target_ptr, *operand1,
1259 orig_value = __atomic_fetch_or (target_ptr, *operand1,
1263 orig_value = __atomic_fetch_xor (target_ptr, *operand1,
/* MIN / MAX via CAS retry loops.  */
1269 orig_value = *target_ptr;
1270 new_value = (*operand1 < orig_value) ? *operand1 : orig_value;
1272 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1274 /* success_memmodel */ __ATOMIC_SEQ_CST,
1275 /* failure_memmodel */ __ATOMIC_SEQ_CST));
1280 orig_value = *target_ptr;
1281 new_value = (*operand1 > orig_value) ? *operand1 : orig_value;
1283 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1285 /* success_memmodel */ __ATOMIC_SEQ_CST,
1286 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* GET / SET / CSWAP.  */
1289 __atomic_load (target_ptr, &orig_value, __ATOMIC_SEQ_CST);
1292 if (fetch_ptr == NULL)
1293 __atomic_store (target_ptr, operand1, __ATOMIC_SEQ_CST);
1295 __atomic_exchange (target_ptr, operand1, &orig_value,
1296 /* memmodel */ __ATOMIC_SEQ_CST);
1301 orig_value = *target_ptr;
1302 new_value = (orig_value == *operand1) ? *operand2 : orig_value;
1304 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1306 /* success_memmodel */ __ATOMIC_SEQ_CST,
1307 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* SUB / INC / DEC.  */
1310 orig_value = __atomic_fetch_sub (target_ptr, *operand1,
1314 orig_value = __atomic_fetch_add (target_ptr, (uint64_t) 1,
1318 orig_value = __atomic_fetch_sub (target_ptr, (uint64_t) 1,
1323 if (fetch_ptr != NULL)
1324 *fetch_ptr = orig_value;
/* Atomic operation on an 'F_type' (float, judging by the '(float) 1'
   casts -- the typedef line is not visible in this listing) target.
   No integer fetch-and-op builtins apply, so every arithmetic op
   (ADD, MULT, MIN, MAX, SUB, INC, DEC) uses a CAS retry loop; no
   bitwise ops are generated for floating types.  */
1329 F_type * restrict fetch_ptr,
1330 upc_op_num_t op_num,
1331 shared F_type * restrict target,
1332 F_type * restrict operand1 __attribute__((unused)),
1333 F_type * restrict operand2 __attribute__((unused)))
1335 F_type orig_value __attribute__((unused));
1336 F_type new_value __attribute__((unused));
1337 F_type *target_ptr = __cvtaddr (*(upc_shared_ptr_t *)&target);
/* ADD via CAS retry loop.  */
1343 orig_value = *target_ptr;
1344 new_value = orig_value + *operand1;
1346 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1348 /* success_memmodel */ __ATOMIC_SEQ_CST,
1349 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* MULT via CAS retry loop.  */
1354 orig_value = *target_ptr;
1355 new_value = orig_value * *operand1;
1357 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1359 /* success_memmodel */ __ATOMIC_SEQ_CST,
1360 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* MIN via CAS retry loop.  */
1365 orig_value = *target_ptr;
1366 new_value = (*operand1 < orig_value) ? *operand1 : orig_value;
1368 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1370 /* success_memmodel */ __ATOMIC_SEQ_CST,
1371 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* MAX via CAS retry loop.  */
1376 orig_value = *target_ptr;
1377 new_value = (*operand1 > orig_value) ? *operand1 : orig_value;
1379 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1381 /* success_memmodel */ __ATOMIC_SEQ_CST,
1382 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* GET / SET.  */
1385 __atomic_load (target_ptr, &orig_value, __ATOMIC_SEQ_CST);
1388 if (fetch_ptr == NULL)
1389 __atomic_store (target_ptr, operand1, __ATOMIC_SEQ_CST);
1391 __atomic_exchange (target_ptr, operand1, &orig_value,
1392 /* memmodel */ __ATOMIC_SEQ_CST);
/* CSWAP via CAS retry loop (bitwise-exact float compare).  */
1397 orig_value = *target_ptr;
1398 new_value = (orig_value == *operand1) ? *operand2 : orig_value;
1400 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1402 /* success_memmodel */ __ATOMIC_SEQ_CST,
1403 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* SUB via CAS retry loop.  */
1408 orig_value = *target_ptr;
1409 new_value = orig_value - *operand1;
1411 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1413 /* success_memmodel */ __ATOMIC_SEQ_CST,
1414 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* INC via CAS retry loop.  */
1419 orig_value = *target_ptr;
1420 new_value = orig_value + (float) 1;
1422 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1424 /* success_memmodel */ __ATOMIC_SEQ_CST,
1425 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* DEC via CAS retry loop.  */
1430 orig_value = *target_ptr;
1431 new_value = orig_value - (float) 1;
1433 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1435 /* success_memmodel */ __ATOMIC_SEQ_CST,
1436 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* Return the pre-operation value when requested.  */
1440 if (fetch_ptr != NULL)
1441 *fetch_ptr = orig_value;
/* Atomic operation on a 'D_type' (double) target; like the float
   variant, all arithmetic ops use CAS retry loops and no bitwise ops
   are generated.  */
1446 D_type * restrict fetch_ptr,
1447 upc_op_num_t op_num,
1448 shared D_type * restrict target,
1449 D_type * restrict operand1 __attribute__((unused)),
1450 D_type * restrict operand2 __attribute__((unused)))
1452 D_type orig_value __attribute__((unused));
1453 D_type new_value __attribute__((unused));
1454 D_type *target_ptr = __cvtaddr (*(upc_shared_ptr_t *)&target);
/* ADD via CAS retry loop.  */
1460 orig_value = *target_ptr;
1461 new_value = orig_value + *operand1;
1463 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1465 /* success_memmodel */ __ATOMIC_SEQ_CST,
1466 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* MULT via CAS retry loop.  */
1471 orig_value = *target_ptr;
1472 new_value = orig_value * *operand1;
1474 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1476 /* success_memmodel */ __ATOMIC_SEQ_CST,
1477 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* MIN via CAS retry loop.  */
1482 orig_value = *target_ptr;
1483 new_value = (*operand1 < orig_value) ? *operand1 : orig_value;
1485 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1487 /* success_memmodel */ __ATOMIC_SEQ_CST,
1488 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* MAX via CAS retry loop.  */
1493 orig_value = *target_ptr;
1494 new_value = (*operand1 > orig_value) ? *operand1 : orig_value;
1496 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1498 /* success_memmodel */ __ATOMIC_SEQ_CST,
1499 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* GET / SET.  */
1502 __atomic_load (target_ptr, &orig_value, __ATOMIC_SEQ_CST);
1505 if (fetch_ptr == NULL)
1506 __atomic_store (target_ptr, operand1, __ATOMIC_SEQ_CST);
1508 __atomic_exchange (target_ptr, operand1, &orig_value,
1509 /* memmodel */ __ATOMIC_SEQ_CST);
/* CSWAP via CAS retry loop (bitwise-exact double compare).  */
1514 orig_value = *target_ptr;
1515 new_value = (orig_value == *operand1) ? *operand2 : orig_value;
1517 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1519 /* success_memmodel */ __ATOMIC_SEQ_CST,
1520 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* SUB via CAS retry loop.  */
1525 orig_value = *target_ptr;
1526 new_value = orig_value - *operand1;
1528 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1530 /* success_memmodel */ __ATOMIC_SEQ_CST,
1531 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* INC via CAS retry loop.  */
1536 orig_value = *target_ptr;
1537 new_value = orig_value + (double) 1;
1539 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1541 /* success_memmodel */ __ATOMIC_SEQ_CST,
1542 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* DEC via CAS retry loop.  */
1547 orig_value = *target_ptr;
1548 new_value = orig_value - (double) 1;
1550 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1552 /* success_memmodel */ __ATOMIC_SEQ_CST,
1553 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* Return the pre-operation value when requested.  */
1557 if (fetch_ptr != NULL)
1558 *fetch_ptr = orig_value;
/* Per-type atomic worker for PTS_type (shared void *, i.e. a UPC
   pointer-to-shared).  Only get/set/cswap appear here -- arithmetic ops
   do not apply to pointers.
   NOTE(review): the function header is elided from this fragment; the
   name/signature are inferred from the PTS_type parameters -- confirm
   against the full file.  */
1563 PTS_type * restrict fetch_ptr,
1564 upc_op_num_t op_num,
1565 shared PTS_type * restrict target,
1566 PTS_type * restrict operand1 __attribute__((unused)),
1567 PTS_type * restrict operand2 __attribute__((unused)))
1569 PTS_type orig_value __attribute__((unused));
1570 PTS_type new_value __attribute__((unused));
/* Convert the UPC shared pointer into a local address usable by the
   __atomic builtins.  */
1571 PTS_type *target_ptr = __cvtaddr (*(upc_shared_ptr_t *)&target);
/* Get: plain atomic load of the current value.  */
1575 __atomic_load (target_ptr, &orig_value, __ATOMIC_SEQ_CST);
/* Set: plain store when the caller does not want the old value,
   exchange (which returns it) otherwise.  */
1578 if (fetch_ptr == NULL)
1579 __atomic_store (target_ptr, operand1, __ATOMIC_SEQ_CST);
1581 __atomic_exchange (target_ptr, operand1, &orig_value,
1582 /* memmodel */ __ATOMIC_SEQ_CST);
/* Compare-and-swap: install *operand2 only when the current value
   equals *operand1; on CAS failure the builtin refreshes orig_value.  */
1587 orig_value = *target_ptr;
1588 new_value = (orig_value == *operand1) ? *operand2 : orig_value;
1590 while (!__atomic_compare_exchange (target_ptr, &orig_value, &new_value,
1592 /* success_memmodel */ __ATOMIC_SEQ_CST,
1593 /* failure_memmodel */ __ATOMIC_SEQ_CST));
/* Hand the previous value back to the caller, when requested.  */
1597 if (fetch_ptr != NULL)
1598 *fetch_ptr = orig_value;
1602 * UPC atomic relaxed operation.
1604 * @param [in] domain Atomic domain
1605 * @param [in] fetch_ptr Pointer to a local location that receives the previous value (may be NULL)
1606 * @param [in] op Atomic operation
1607 * @param [in] target Target address of the operation
1608 * @param [in] operand1 Operation required argument
1609 * @param [in] operand2 Operation required argument
1611 * @ingroup UPCATOMIC UPC Atomic Functions
/* Entry point for relaxed atomic operations: validate the requested
   operation against the domain, then dispatch on the domain's element
   type to the matching per-type worker.
   NOTE(review): the 'case' labels and braces of the dispatch switch are
   elided from this fragment -- only the worker calls are visible.  */
1614 upc_atomic_relaxed (upc_atomicdomain_t *domain,
1615 void * restrict fetch_ptr,
1617 shared void * restrict target,
1618 const void * restrict operand1,
1619 const void * restrict operand2)
/* Each thread owns one slot of the per-thread domain array.  */
1621 struct upc_atomicdomain_struct *ldomain =
1622 (struct upc_atomicdomain_struct *) &domain[MYTHREAD];
1623 upc_op_num_t op_num;
/* 'op' must encode exactly one operation bit and be a known operation.  */
1625 __upc_fatal ("atomic operation (0x%llx) may have only "
1626 "a single bit set", (long long)op);
1627 if (!__upc_atomic_is_valid_op (op))
1628 __upc_fatal ("invalid atomic operation (0x%llx)",
1630 op_num = __upc_atomic_op_num (op);
/* The operation must be one the domain was created with.  */
1631 if (op & ~ldomain->ops)
1632 __upc_fatal ("invalid operation (%s) for specified domain",
1633 __upc_atomic_op_name (op_num));
/* Check that fetch_ptr/operand1/operand2 presence matches what this
   operation requires.  */
1634 __upc_atomic_check_operands (op_num, fetch_ptr, operand1, operand2);
/* Dispatch on the element type recorded in the domain; each arm casts
   the untyped arguments to the concrete type's worker signature.  */
1635 switch (ldomain->optype)
1639 (I_type *) fetch_ptr,
1641 (shared I_type *) target,
1642 (I_type *) operand1,
1643 (I_type *) operand2);
1647 (UI_type *) fetch_ptr,
1649 (shared UI_type *) target,
1650 (UI_type *) operand1,
1651 (UI_type *) operand2);
1655 (L_type *) fetch_ptr,
1657 (shared L_type *) target,
1658 (L_type *) operand1,
1659 (L_type *) operand2);
1663 (UL_type *) fetch_ptr,
1665 (shared UL_type *) target,
1666 (UL_type *) operand1,
1667 (UL_type *) operand2);
1671 (LL_type *) fetch_ptr,
1673 (shared LL_type *) target,
1674 (LL_type *) operand1,
1675 (LL_type *) operand2);
1679 (ULL_type *) fetch_ptr,
1681 (shared ULL_type *) target,
1682 (ULL_type *) operand1,
1683 (ULL_type *) operand2);
1687 (I32_type *) fetch_ptr,
1689 (shared I32_type *) target,
1690 (I32_type *) operand1,
1691 (I32_type *) operand2);
1695 (UI32_type *) fetch_ptr,
1697 (shared UI32_type *) target,
1698 (UI32_type *) operand1,
1699 (UI32_type *) operand2);
1703 (I64_type *) fetch_ptr,
1705 (shared I64_type *) target,
1706 (I64_type *) operand1,
1707 (I64_type *) operand2);
1711 (UI64_type *) fetch_ptr,
1713 (shared UI64_type *) target,
1714 (UI64_type *) operand1,
1715 (UI64_type *) operand2);
1719 (F_type *) fetch_ptr,
1721 (shared F_type *) target,
1722 (F_type *) operand1,
1723 (F_type *) operand2);
1727 (D_type *) fetch_ptr,
1729 (shared D_type *) target,
1730 (D_type *) operand1,
1731 (D_type *) operand2);
1735 (PTS_type *) fetch_ptr,
1737 (shared PTS_type *) target,
1738 (PTS_type *) operand1,
1739 (PTS_type *) operand2);
1745 * UPC atomic strict operation.
1747 * @param [in] domain Atomic domain
1748 * @param [in] fetch_ptr Pointer to a local location that receives the previous value (may be NULL)
1749 * @param [in] op Atomic operation
1750 * @param [in] target Target address of the operation
1751 * @param [in] operand1 Operation required argument
1752 * @param [in] operand2 Operation required argument
1754 * @ingroup UPCATOMIC UPC Atomic Functions
/* Strict variant: same argument contract as upc_atomic_relaxed.
   NOTE(review): only the forwarding call is visible in this fragment;
   any surrounding upc_fence / ordering statements are elided -- confirm
   against the full file.  */
1757 upc_atomic_strict (upc_atomicdomain_t *domain,
1758 void * restrict fetch_ptr,
1760 shared void * restrict target,
1761 const void * restrict operand1,
1762 const void * restrict operand2)
/* Delegate the actual work to the relaxed implementation.  */
1765 upc_atomic_relaxed (domain, fetch_ptr, op, target, operand1, operand2);
1770 * Collective allocation of atomic domain.
1772 * Implementation uses native Portals4 atomic functions and the
1773 * hint field is ignored.
1775 * @param [in] type Atomic operation type
1776 * @param [in] ops Atomic domain operations
1777 * @param [in] hints Atomic operation hint
1778 * @retval Allocated atomic domain pointer
1780 * @ingroup UPCATOMIC UPC Atomic Functions
1782 upc_atomicdomain_t *
1783 upc_all_atomicdomain_alloc (upc_type_t type,
1785 __attribute__((unused)) upc_atomichint_t hints)
1787 upc_atomicdomain_t *domain;
1788 struct upc_atomicdomain_struct *ldomain;
1789 upc_op_t supported_ops;
/* Reject element types this runtime cannot handle atomically.  */
1790 if (!__upc_atomic_is_valid_type (type))
1791 __upc_fatal ("unsupported atomic type: 0x%llx",
/* Every requested operation must be supported for this element type.  */
1793 supported_ops = __upc_atomic_supported_ops (type);
1794 if ((ops & ~supported_ops) != 0)
1795 __upc_fatal ("one/more requested atomic operations (0x%llx) unsupported "
1796 "for type `%s'", (long long) ops,
1797 __upc_atomic_type_name (type));
/* Collective allocation: one upc_atomicdomain_struct per thread.  */
1798 domain = (upc_atomicdomain_t *)
1799 upc_all_alloc (THREADS, sizeof (struct upc_atomicdomain_struct));
1801 __upc_fatal ("unable to allocate atomic domain");
/* Record the element type in this thread's slot of the domain.
   NOTE(review): the assignment of ldomain->ops is elided from this
   fragment -- confirm it is initialized in the full file.  */
1802 ldomain = (struct upc_atomicdomain_struct *)&domain[MYTHREAD];
1804 ldomain->optype = type;
1809 * Collective free of the atomic domain.
1811 * @param [in] domain Pointer to atomic domain
1813 * @ingroup UPCATOMIC UPC Atomic Functions
1816 upc_all_atomicdomain_free (upc_atomicdomain_t * domain)
/* Freeing a NULL domain is a caller error.  (The remainder of the body
   is elided from this fragment.)  */
1818 assert (domain != NULL);
1828 * Query implementation for expected performance.
1830 * @param [in] optype Atomic operation type
1831 * @param [in] ops Atomic domain operations
1832 * @param [in] addr Atomic address
1833 * @retval Expected performance
1835 * @ingroup UPCATOMIC UPC Atomic Functions
/* Performance query: all parameters are ignored because this runtime
   reports every atomic operation as fast (see comment below).  */
1838 upc_atomic_query (__attribute__((unused)) upc_type_t optype,
1839 __attribute__((unused)) upc_op_t ops,
1840 __attribute__((unused)) shared void *addr)
1842 /* We could make the distinction that only operations
1843 directly supported by the builtin atomics are "fast",
1844 but for now ... everything in the SMP runtime is
1845 defined to be fast. */
1846 return UPC_ATOMIC_PERFORMANCE_FAST;