/* General types and functions that are useful for processing of OpenMP,
   OpenACC and similar directives at various stages of compilation.

   Copyright (C) 2005-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "langhooks.h"
#include "omp-general.h"
#include "stringpool.h"
#include "attribs.h"
enum omp_requires omp_requires_mask;

/* Find an OMP clause of type KIND within CLAUSES.  */

tree
omp_find_clause (tree clauses, enum omp_clause_code kind)
{
  for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == kind)
      return clauses;

  return NULL_TREE;
}
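/* Example (illustrative only): to test whether a worksharing loop carries a
   schedule clause, a caller might write
     tree c = omp_find_clause (gimple_omp_for_clauses (for_stmt),
                               OMP_CLAUSE_SCHEDULE);
   and check whether C is NULL_TREE.  */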
/* Return true if DECL is a reference type.  */

bool
omp_is_reference (tree decl)
{
  return lang_hooks.decls.omp_privatize_by_reference (decl);
}
/* Adjust *COND_CODE and *N2 so that the former is either LT_EXPR or
   GT_EXPR (an NE_EXPR comparison is left untouched).  */

void
omp_adjust_for_condition (location_t loc, enum tree_code *cond_code, tree *n2)
{
  switch (*cond_code)
    {
    case LT_EXPR:
    case GT_EXPR:
    case NE_EXPR:
      break;
    case LE_EXPR:
      if (POINTER_TYPE_P (TREE_TYPE (*n2)))
        *n2 = fold_build_pointer_plus_hwi_loc (loc, *n2, 1);
      else
        *n2 = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (*n2), *n2,
                               build_int_cst (TREE_TYPE (*n2), 1));
      *cond_code = LT_EXPR;
      break;
    case GE_EXPR:
      if (POINTER_TYPE_P (TREE_TYPE (*n2)))
        *n2 = fold_build_pointer_plus_hwi_loc (loc, *n2, -1);
      else
        *n2 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (*n2), *n2,
                               build_int_cst (TREE_TYPE (*n2), 1));
      *cond_code = GT_EXPR;
      break;
    default:
      gcc_unreachable ();
    }
}
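/* Illustrative example: a loop written as "for (i = 0; i <= n; i++)" arrives
   here with *COND_CODE == LE_EXPR and *N2 == n; it leaves with
   *COND_CODE == LT_EXPR and *N2 == n + 1, so later code only has to reason
   about < and > comparisons.  */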
/* Return the looping step from INCR, extracted from the step of a gimple omp
   for statement.  */

tree
omp_get_for_step_from_incr (location_t loc, tree incr)
{
  tree step;
  switch (TREE_CODE (incr))
    {
    case PLUS_EXPR:
      step = TREE_OPERAND (incr, 1);
      break;
    case POINTER_PLUS_EXPR:
      step = fold_convert (ssizetype, TREE_OPERAND (incr, 1));
      break;
    case MINUS_EXPR:
      step = TREE_OPERAND (incr, 1);
      step = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (step), step);
      break;
    default:
      gcc_unreachable ();
    }
  return step;
}
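/* Illustrative example: an increment of the form "i = i - 4" is represented
   as a MINUS_EXPR with operand 1 == 4, so the step returned above is -4;
   a pointer increment "p = p + 8" comes in as POINTER_PLUS_EXPR and yields
   a ssizetype step of 8.  */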
/* Extract the header elements of parallel loop FOR_STMT and store
   them into *FD.  */

void
omp_extract_for_data (gomp_for *for_stmt, struct omp_for_data *fd,
                      struct omp_for_data_loop *loops)
{
  tree t, var, *collapse_iter, *collapse_count;
  tree count = NULL_TREE, iter_type = long_integer_type_node;
  struct omp_for_data_loop *loop;
  int i;
  struct omp_for_data_loop dummy_loop;
  location_t loc = gimple_location (for_stmt);
  bool simd = gimple_omp_for_kind (for_stmt) & GF_OMP_FOR_SIMD;
  bool distribute = gimple_omp_for_kind (for_stmt)
                    == GF_OMP_FOR_KIND_DISTRIBUTE;
  bool taskloop = gimple_omp_for_kind (for_stmt)
                  == GF_OMP_FOR_KIND_TASKLOOP;
  tree iterv, countv;

  fd->for_stmt = for_stmt;
  fd->pre = NULL;
  fd->have_nowait = distribute || simd;
  fd->have_ordered = false;
  fd->have_reductemp = false;
  fd->tiling = NULL_TREE;
  fd->collapse = 1;
  fd->ordered = 0;
  fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
  fd->sched_modifiers = 0;
  fd->chunk_size = NULL_TREE;
  fd->simd_schedule = false;
  collapse_iter = NULL;
  collapse_count = NULL;

  for (t = gimple_omp_for_clauses (for_stmt); t ; t = OMP_CLAUSE_CHAIN (t))
    switch (OMP_CLAUSE_CODE (t))
      {
      case OMP_CLAUSE_NOWAIT:
        fd->have_nowait = true;
        break;
      case OMP_CLAUSE_ORDERED:
        fd->have_ordered = true;
        if (OMP_CLAUSE_ORDERED_EXPR (t))
          fd->ordered = tree_to_shwi (OMP_CLAUSE_ORDERED_EXPR (t));
        break;
      case OMP_CLAUSE_SCHEDULE:
        gcc_assert (!distribute && !taskloop);
        fd->sched_kind
          = (enum omp_clause_schedule_kind)
            (OMP_CLAUSE_SCHEDULE_KIND (t) & OMP_CLAUSE_SCHEDULE_MASK);
        fd->sched_modifiers = (OMP_CLAUSE_SCHEDULE_KIND (t)
                               & ~OMP_CLAUSE_SCHEDULE_MASK);
        fd->chunk_size = OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (t);
        fd->simd_schedule = OMP_CLAUSE_SCHEDULE_SIMD (t);
        break;
      case OMP_CLAUSE_DIST_SCHEDULE:
        gcc_assert (distribute);
        fd->chunk_size = OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (t);
        break;
      case OMP_CLAUSE_COLLAPSE:
        fd->collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (t));
        if (fd->collapse > 1)
          {
            collapse_iter = &OMP_CLAUSE_COLLAPSE_ITERVAR (t);
            collapse_count = &OMP_CLAUSE_COLLAPSE_COUNT (t);
          }
        break;
      case OMP_CLAUSE_TILE:
        fd->tiling = OMP_CLAUSE_TILE_LIST (t);
        fd->collapse = list_length (fd->tiling);
        gcc_assert (fd->collapse);
        collapse_iter = &OMP_CLAUSE_TILE_ITERVAR (t);
        collapse_count = &OMP_CLAUSE_TILE_COUNT (t);
        break;
      case OMP_CLAUSE__REDUCTEMP_:
        fd->have_reductemp = true;
      default:
        break;
      }

  if (fd->collapse > 1 || fd->tiling)
    fd->loops = loops;
  else
    fd->loops = &fd->loop;

  if (fd->ordered && fd->collapse == 1 && loops != NULL)
    {
      fd->loops = loops;
      iterv = NULL_TREE;
      countv = NULL_TREE;
      collapse_iter = &iterv;
      collapse_count = &countv;
    }

  /* FIXME: for now map schedule(auto) to schedule(static).
     There should be analysis to determine whether all iterations
     are approximately the same amount of work (then schedule(static)
     is best) or if it varies (then schedule(dynamic,N) is better).  */
  if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_AUTO)
    {
      fd->sched_kind = OMP_CLAUSE_SCHEDULE_STATIC;
      gcc_assert (fd->chunk_size == NULL);
    }
  gcc_assert ((fd->collapse == 1 && !fd->tiling) || collapse_iter != NULL);
  if (taskloop)
    fd->sched_kind = OMP_CLAUSE_SCHEDULE_RUNTIME;
  if (fd->sched_kind == OMP_CLAUSE_SCHEDULE_RUNTIME)
    gcc_assert (fd->chunk_size == NULL);
  else if (fd->chunk_size == NULL)
    {
      /* We only need to compute a default chunk size for ordered
         static loops and dynamic loops.  */
      if (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC
          || fd->have_ordered)
        fd->chunk_size = (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC)
                         ? integer_zero_node : integer_one_node;
    }

  int cnt = fd->ordered ? fd->ordered : fd->collapse;
  for (i = 0; i < cnt; i++)
    {
      if (i == 0
          && fd->collapse == 1
          && !fd->tiling
          && (fd->ordered == 0 || loops == NULL))
        loop = &fd->loop;
      else if (loops != NULL)
        loop = loops + i;
      else
        loop = &dummy_loop;

      loop->v = gimple_omp_for_index (for_stmt, i);
      gcc_assert (SSA_VAR_P (loop->v));
      gcc_assert (TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
                  || TREE_CODE (TREE_TYPE (loop->v)) == POINTER_TYPE);
      var = TREE_CODE (loop->v) == SSA_NAME ? SSA_NAME_VAR (loop->v) : loop->v;
      loop->n1 = gimple_omp_for_initial (for_stmt, i);

      loop->cond_code = gimple_omp_for_cond (for_stmt, i);
      loop->n2 = gimple_omp_for_final (for_stmt, i);
      gcc_assert (loop->cond_code != NE_EXPR
                  || (gimple_omp_for_kind (for_stmt)
                      != GF_OMP_FOR_KIND_OACC_LOOP));
      omp_adjust_for_condition (loc, &loop->cond_code, &loop->n2);

      t = gimple_omp_for_incr (for_stmt, i);
      gcc_assert (TREE_OPERAND (t, 0) == var);
      loop->step = omp_get_for_step_from_incr (loc, t);

      if (loop->cond_code == NE_EXPR)
        {
          gcc_assert (TREE_CODE (loop->step) == INTEGER_CST);
          if (TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE)
            {
              if (integer_onep (loop->step))
                loop->cond_code = LT_EXPR;
              else
                {
                  gcc_assert (integer_minus_onep (loop->step));
                  loop->cond_code = GT_EXPR;
                }
            }
          else
            {
              tree unit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (loop->v)));
              gcc_assert (TREE_CODE (unit) == INTEGER_CST);
              if (tree_int_cst_equal (unit, loop->step))
                loop->cond_code = LT_EXPR;
              else
                {
                  gcc_assert (wi::neg (wi::to_widest (unit))
                              == wi::to_widest (loop->step));
                  loop->cond_code = GT_EXPR;
                }
            }
        }

      omp_adjust_for_condition (loc, &loop->cond_code, &loop->n2);

      if (simd
          || (fd->sched_kind == OMP_CLAUSE_SCHEDULE_STATIC
              && !fd->have_ordered))
        {
          if (fd->collapse == 1 && !fd->tiling)
            iter_type = TREE_TYPE (loop->v);
          else if (i == 0
                   || TYPE_PRECISION (iter_type)
                      < TYPE_PRECISION (TREE_TYPE (loop->v)))
            iter_type
              = build_nonstandard_integer_type
                  (TYPE_PRECISION (TREE_TYPE (loop->v)), 1);
        }
      else if (iter_type != long_long_unsigned_type_node)
        {
          if (POINTER_TYPE_P (TREE_TYPE (loop->v)))
            iter_type = long_long_unsigned_type_node;
          else if (TYPE_UNSIGNED (TREE_TYPE (loop->v))
                   && TYPE_PRECISION (TREE_TYPE (loop->v))
                      >= TYPE_PRECISION (iter_type))
            {
              tree n;

              if (loop->cond_code == LT_EXPR)
                n = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (loop->v),
                                     loop->n2, loop->step);
              else
                n = loop->n1;
              if (TREE_CODE (n) != INTEGER_CST
                  || tree_int_cst_lt (TYPE_MAX_VALUE (iter_type), n))
                iter_type = long_long_unsigned_type_node;
            }
          else if (TYPE_PRECISION (TREE_TYPE (loop->v))
                   > TYPE_PRECISION (iter_type))
            {
              tree n1, n2;

              if (loop->cond_code == LT_EXPR)
                {
                  n1 = loop->n1;
                  n2 = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (loop->v),
                                        loop->n2, loop->step);
                }
              else
                {
                  n1 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (loop->v),
                                        loop->n2, loop->step);
                  n2 = loop->n1;
                }
              if (TREE_CODE (n1) != INTEGER_CST
                  || TREE_CODE (n2) != INTEGER_CST
                  || !tree_int_cst_lt (TYPE_MIN_VALUE (iter_type), n1)
                  || !tree_int_cst_lt (n2, TYPE_MAX_VALUE (iter_type)))
                iter_type = long_long_unsigned_type_node;
            }
        }

      if (i >= fd->collapse)
        continue;

      if (collapse_count && *collapse_count == NULL)
        {
          t = fold_binary (loop->cond_code, boolean_type_node,
                           fold_convert (TREE_TYPE (loop->v), loop->n1),
                           fold_convert (TREE_TYPE (loop->v), loop->n2));
          if (t && integer_zerop (t))
            count = build_zero_cst (long_long_unsigned_type_node);
          else if ((i == 0 || count != NULL_TREE)
                   && TREE_CODE (TREE_TYPE (loop->v)) == INTEGER_TYPE
                   && TREE_CONSTANT (loop->n1)
                   && TREE_CONSTANT (loop->n2)
                   && TREE_CODE (loop->step) == INTEGER_CST)
            {
              tree itype = TREE_TYPE (loop->v);

              if (POINTER_TYPE_P (itype))
                itype = signed_type_for (itype);
              t = build_int_cst (itype, (loop->cond_code == LT_EXPR ? -1 : 1));
              t = fold_build2_loc (loc, PLUS_EXPR, itype,
                                   fold_convert_loc (loc, itype, loop->step),
                                   t);
              t = fold_build2_loc (loc, PLUS_EXPR, itype, t,
                                   fold_convert_loc (loc, itype, loop->n2));
              t = fold_build2_loc (loc, MINUS_EXPR, itype, t,
                                   fold_convert_loc (loc, itype, loop->n1));
              if (TYPE_UNSIGNED (itype) && loop->cond_code == GT_EXPR)
                {
                  tree step = fold_convert_loc (loc, itype, loop->step);
                  t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype,
                                       fold_build1_loc (loc, NEGATE_EXPR,
                                                        itype, t),
                                       fold_build1_loc (loc, NEGATE_EXPR,
                                                        itype, step));
                }
              else
                t = fold_build2_loc (loc, TRUNC_DIV_EXPR, itype, t,
                                     fold_convert_loc (loc, itype,
                                                       loop->step));
              t = fold_convert_loc (loc, long_long_unsigned_type_node, t);
              if (count != NULL_TREE)
                count = fold_build2_loc (loc, MULT_EXPR,
                                         long_long_unsigned_type_node,
                                         count, t);
              else
                count = t;
              if (TREE_CODE (count) != INTEGER_CST)
                count = NULL_TREE;
            }
          else if (count && !integer_zerop (count))
            count = NULL_TREE;
        }
    }

  if (count
      && !simd
      && (fd->sched_kind != OMP_CLAUSE_SCHEDULE_STATIC
          || fd->have_ordered))
    {
      if (!tree_int_cst_lt (count, TYPE_MAX_VALUE (long_integer_type_node)))
        iter_type = long_long_unsigned_type_node;
      else
        iter_type = long_integer_type_node;
    }
  else if (collapse_iter && *collapse_iter != NULL)
    iter_type = TREE_TYPE (*collapse_iter);
  fd->iter_type = iter_type;
  if (collapse_iter && *collapse_iter == NULL)
    *collapse_iter = create_tmp_var (iter_type, ".iter");
  if (collapse_count && *collapse_count == NULL)
    {
      if (count)
        *collapse_count = fold_convert_loc (loc, iter_type, count);
      else
        *collapse_count = create_tmp_var (iter_type, ".count");
    }

  if (fd->collapse > 1 || fd->tiling || (fd->ordered && loops))
    {
      fd->loop.v = *collapse_iter;
      fd->loop.n1 = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      fd->loop.n2 = *collapse_count;
      fd->loop.step = build_int_cst (TREE_TYPE (fd->loop.v), 1);
      fd->loop.cond_code = LT_EXPR;
    }
  else if (loops)
    loops[0] = fd->loop;
}
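/* Worked example (illustrative only): for a loop such as
     #pragma omp for schedule(dynamic)
     for (i = 0; i < n; i++) ...
   the code above leaves fd->collapse == 1, fd->loop.n1 == 0,
   fd->loop.n2 == n, fd->loop.step == 1, fd->loop.cond_code == LT_EXPR and
   fd->sched_kind == OMP_CLAUSE_SCHEDULE_DYNAMIC; because no chunk was
   given, fd->chunk_size defaults to 1.  */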
/* Build a call to GOMP_barrier.  */

gimple *
omp_build_barrier (tree lhs)
{
  tree fndecl = builtin_decl_explicit (lhs ? BUILT_IN_GOMP_BARRIER_CANCEL
                                           : BUILT_IN_GOMP_BARRIER);
  gcall *g = gimple_build_call (fndecl, 0);
  if (lhs)
    gimple_call_set_lhs (g, lhs);
  return g;
}
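/* Note: passing a non-NULL LHS selects the cancellable entry point
   GOMP_barrier_cancel and stores its result (whether the construct was
   cancelled) into LHS; callers that do not care about cancellation simply
   pass NULL_TREE.  */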
/* Return maximum possible vectorization factor for the target.  */

poly_uint64
omp_max_vf (void)
{
  if (!optimize
      || optimize_debug
      || !flag_tree_loop_optimize
      || (!flag_tree_loop_vectorize
          && global_options_set.x_flag_tree_loop_vectorize))
    return 1;

  auto_vector_sizes sizes;
  targetm.vectorize.autovectorize_vector_sizes (&sizes);
  if (!sizes.is_empty ())
    {
      poly_uint64 vf = 0;
      for (unsigned int i = 0; i < sizes.length (); ++i)
        vf = ordered_max (vf, sizes[i]);
      return vf;
    }

  machine_mode vqimode = targetm.vectorize.preferred_simd_mode (QImode);
  if (GET_MODE_CLASS (vqimode) == MODE_VECTOR_INT)
    return GET_MODE_NUNITS (vqimode);

  return 1;
}
/* Return maximum SIMT width if offloading may target SIMT hardware.  */

int
omp_max_simt_vf (void)
{
  if (!optimize)
    return 0;
  if (ENABLE_OFFLOADING)
    for (const char *c = getenv ("OFFLOAD_TARGET_NAMES"); c;)
      {
        if (!strncmp (c, "nvptx", strlen ("nvptx")))
          return 32;
        else if ((c = strchr (c, ',')))
          c++;
      }
  return 0;
}
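/* Illustrative example: when the compiler was configured for offloading and
   OFFLOAD_TARGET_NAMES contains an entry such as "nvptx-none", the loop
   above matches the "nvptx" prefix and returns 32, the warp size used as
   the SIMT width; otherwise the function returns 0.  */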
/* Encode an oacc launch argument.  This matches the GOMP_LAUNCH_PACK
   macro in gomp-constants.h.  We do not check for overflow.  */

tree
oacc_launch_pack (unsigned code, tree device, unsigned op)
{
  tree res;

  res = build_int_cst (unsigned_type_node, GOMP_LAUNCH_PACK (code, 0, op));
  if (device)
    {
      device = fold_build2 (LSHIFT_EXPR, unsigned_type_node,
                            device, build_int_cst (unsigned_type_node,
                                                   GOMP_LAUNCH_DEVICE_SHIFT));
      res = fold_build2 (BIT_IOR_EXPR, unsigned_type_node, res, device);
    }
  return res;
}
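/* Illustrative note: CODE and OP are packed into the constant built first,
   and when DEVICE is non-null it is shifted up by GOMP_LAUNCH_DEVICE_SHIFT
   and OR'd in, so for constant operands the whole launch word folds down to
   a single INTEGER_CST; the exact bit layout is whatever GOMP_LAUNCH_PACK
   defines in gomp-constants.h.  */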
/* Look for compute grid dimension clauses and convert to an attribute
   attached to FN.  This permits the target-side code to (a) massage
   the dimensions, (b) emit that data and (c) optimize.  Non-constant
   dimensions are pushed onto ARGS.

   The attribute value is a TREE_LIST.  A set of dimensions is
   represented as a list of INTEGER_CST.  Those that are runtime
   exprs are represented as an INTEGER_CST of zero.

   TODO: Normally the attribute will just contain a single such list.  If
   however it contains a list of lists, this will represent the use of
   device_type.  Each member of the outer list is an assoc list of
   dimensions, keyed by the device type.  The first entry will be the
   default.  Well, that's the plan.  */

/* Replace any existing oacc fn attribute with updated dimensions.  */

void
oacc_replace_fn_attrib (tree fn, tree dims)
{
  tree ident = get_identifier (OACC_FN_ATTRIB);
  tree attribs = DECL_ATTRIBUTES (fn);

  /* If we happen to be present as the first attrib, drop it.  */
  if (attribs && TREE_PURPOSE (attribs) == ident)
    attribs = TREE_CHAIN (attribs);
  DECL_ATTRIBUTES (fn) = tree_cons (ident, dims, attribs);
}
/* Scan CLAUSES for launch dimensions and attach them to the oacc
   function attribute.  Push any that are non-constant onto the ARGS
   list, along with an appropriate GOMP_LAUNCH_DIM tag.  */

void
oacc_set_fn_attrib (tree fn, tree clauses, vec<tree> *args)
{
  /* Must match GOMP_DIM ordering.  */
  static const omp_clause_code ids[]
    = { OMP_CLAUSE_NUM_GANGS, OMP_CLAUSE_NUM_WORKERS,
        OMP_CLAUSE_VECTOR_LENGTH };
  unsigned ix;
  tree dims[GOMP_DIM_MAX];

  tree attr = NULL_TREE;
  unsigned non_const = 0;

  for (ix = GOMP_DIM_MAX; ix--;)
    {
      tree clause = omp_find_clause (clauses, ids[ix]);
      tree dim = NULL_TREE;

      if (clause)
        dim = OMP_CLAUSE_EXPR (clause, ids[ix]);
      dims[ix] = dim;
      if (dim && TREE_CODE (dim) != INTEGER_CST)
        {
          dim = integer_zero_node;
          non_const |= GOMP_DIM_MASK (ix);
        }
      attr = tree_cons (NULL_TREE, dim, attr);
    }

  oacc_replace_fn_attrib (fn, attr);

  if (non_const)
    {
      /* Push a dynamic argument set.  */
      args->safe_push (oacc_launch_pack (GOMP_LAUNCH_DIM,
                                         NULL_TREE, non_const));
      for (unsigned ix = 0; ix != GOMP_DIM_MAX; ix++)
        if (non_const & GOMP_DIM_MASK (ix))
          args->safe_push (dims[ix]);
    }
}
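/* Illustrative example: for "#pragma acc parallel num_gangs(32) num_workers(w)"
   the attribute list built above is (32, 0, NULL_TREE) in gang/worker/vector
   order; because the worker dimension is not an INTEGER_CST, a
   GOMP_LAUNCH_DIM launch word with the worker bit set is pushed onto ARGS,
   followed by the expression W itself.  */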
/* Process the routine's dimension clauses to generate an attribute
   value.  Issue diagnostics as appropriate.  We default to SEQ
   (OpenACC 2.5 clarifies this).  All dimensions have a size of zero
   (dynamic).  TREE_PURPOSE is set to indicate whether that dimension
   can have a loop partitioned on it.  Non-zero indicates yes, zero
   indicates no.  By construction once a non-zero has been reached,
   further inner dimensions must also be non-zero.  We set TREE_VALUE
   to zero for the dimensions that may be partitioned and 1 for the
   other ones -- if a loop is (erroneously) spawned at an outer level,
   we don't want to try and partition it.  */

tree
oacc_build_routine_dims (tree clauses)
{
  /* Must match GOMP_DIM ordering.  */
  static const omp_clause_code ids[]
    = {OMP_CLAUSE_GANG, OMP_CLAUSE_WORKER, OMP_CLAUSE_VECTOR, OMP_CLAUSE_SEQ};
  int ix;
  int level = -1;

  for (; clauses; clauses = OMP_CLAUSE_CHAIN (clauses))
    for (ix = GOMP_DIM_MAX + 1; ix--;)
      if (OMP_CLAUSE_CODE (clauses) == ids[ix])
        {
          if (level >= 0)
            error_at (OMP_CLAUSE_LOCATION (clauses),
                      "multiple loop axes specified for routine");
          level = ix;
          break;
        }

  /* Default to SEQ.  */
  if (level < 0)
    level = GOMP_DIM_MAX;

  tree dims = NULL_TREE;

  for (ix = GOMP_DIM_MAX; ix--;)
    dims = tree_cons (build_int_cst (boolean_type_node, ix >= level),
                      build_int_cst (integer_type_node, ix < level), dims);

  return dims;
}
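/* Illustrative example: "#pragma acc routine worker" gives level == 1, so
   the list returned above is, in gang/worker/vector order,
   (purpose 0, value 1), (purpose 1, value 0), (purpose 1, value 0): only
   the worker and vector axes may have loops partitioned on them, while the
   gang dimension is pinned to 1.  */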
/* Retrieve the oacc function attrib and return it.  Non-oacc
   functions will return NULL.  */

tree
oacc_get_fn_attrib (tree fn)
{
  return lookup_attribute (OACC_FN_ATTRIB, DECL_ATTRIBUTES (fn));
}
/* Return true if FN is an OpenMP or OpenACC offloading function.  */

bool
offloading_function_p (tree fn)
{
  tree attrs = DECL_ATTRIBUTES (fn);
  return (lookup_attribute ("omp declare target", attrs)
          || lookup_attribute ("omp target entrypoint", attrs));
}
/* Extract an oacc execution dimension from FN.  FN must be an
   offloaded function or routine that has already had its execution
   dimensions lowered to the target-specific values.  */

int
oacc_get_fn_dim_size (tree fn, int axis)
{
  tree attrs = oacc_get_fn_attrib (fn);

  gcc_assert (axis < GOMP_DIM_MAX);

  tree dims = TREE_VALUE (attrs);
  while (axis--)
    dims = TREE_CHAIN (dims);

  int size = TREE_INT_CST_LOW (TREE_VALUE (dims));

  return size;
}
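/* Illustrative usage: oacc_get_fn_dim_size (fn, GOMP_DIM_WORKER) walks to
   the second entry of the dimension list and returns its value, e.g. the
   number of workers chosen for the offloaded region.  */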
/* Extract the dimension axis from an IFN_GOACC_DIM_POS or
   IFN_GOACC_DIM_SIZE call.  */

int
oacc_get_ifn_dim_arg (const gimple *stmt)
{
  gcc_checking_assert (gimple_call_internal_fn (stmt) == IFN_GOACC_DIM_SIZE
                       || gimple_call_internal_fn (stmt) == IFN_GOACC_DIM_POS);
  tree arg = gimple_call_arg (stmt, 0);
  HOST_WIDE_INT axis = TREE_INT_CST_LOW (arg);

  gcc_checking_assert (axis >= 0 && axis < GOMP_DIM_MAX);
  return (int) axis;
}