xfail scan-tree-dump-not throw in g++.dg/pr99966.C on hppa*64*-*-*
[official-gcc.git] / gcc / c-family / c-omp.cc
blob5117022e330c95592d7731eec161ab1b5c6925d9
1 /* This file contains routines to construct OpenACC and OpenMP constructs,
2 called from parsing in the C and C++ front ends.
4 Copyright (C) 2005-2024 Free Software Foundation, Inc.
5 Contributed by Richard Henderson <rth@redhat.com>,
6 Diego Novillo <dnovillo@redhat.com>.
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "options.h"
28 #include "c-common.h"
29 #include "gimple-expr.h"
30 #include "c-pragma.h"
31 #include "stringpool.h"
32 #include "omp-general.h"
33 #include "gomp-constants.h"
34 #include "memmodel.h"
35 #include "attribs.h"
36 #include "gimplify.h"
37 #include "langhooks.h"
38 #include "bitmap.h"
39 #include "tree-iterator.h"
42 /* Complete a #pragma oacc wait construct. LOC is the location of
43 the #pragma. */
45 tree
46 c_finish_oacc_wait (location_t loc, tree parms, tree clauses)
48 const int nparms = list_length (parms);
49 tree stmt, t;
50 vec<tree, va_gc> *args;
52 vec_alloc (args, nparms + 2);
53 stmt = builtin_decl_explicit (BUILT_IN_GOACC_WAIT);
55 if (omp_find_clause (clauses, OMP_CLAUSE_ASYNC))
56 t = OMP_CLAUSE_ASYNC_EXPR (clauses);
57 else
58 t = build_int_cst (integer_type_node, GOMP_ASYNC_SYNC);
60 args->quick_push (t);
61 args->quick_push (build_int_cst (integer_type_node, nparms));
63 for (t = parms; t; t = TREE_CHAIN (t))
65 if (TREE_CODE (OMP_CLAUSE_WAIT_EXPR (t)) == INTEGER_CST)
66 args->quick_push (build_int_cst (integer_type_node,
67 TREE_INT_CST_LOW (OMP_CLAUSE_WAIT_EXPR (t))));
68 else
69 args->quick_push (OMP_CLAUSE_WAIT_EXPR (t));
72 stmt = build_call_expr_loc_vec (loc, stmt, args);
74 vec_free (args);
76 return stmt;
79 /* Complete a #pragma omp master construct. STMT is the structured-block
80 that follows the pragma. LOC is the location of the #pragma. */
82 tree
83 c_finish_omp_master (location_t loc, tree stmt)
85 tree t = add_stmt (build1 (OMP_MASTER, void_type_node, stmt));
86 SET_EXPR_LOCATION (t, loc);
87 return t;
90 /* Complete a #pragma omp masked construct. BODY is the structured-block
91 that follows the pragma. LOC is the location of the #pragma. */
93 tree
94 c_finish_omp_masked (location_t loc, tree body, tree clauses)
96 tree stmt = make_node (OMP_MASKED);
97 TREE_TYPE (stmt) = void_type_node;
98 OMP_MASKED_BODY (stmt) = body;
99 OMP_MASKED_CLAUSES (stmt) = clauses;
100 SET_EXPR_LOCATION (stmt, loc);
101 return add_stmt (stmt);
104 /* Complete a #pragma omp taskgroup construct. BODY is the structured-block
105 that follows the pragma. LOC is the location of the #pragma. */
107 tree
108 c_finish_omp_taskgroup (location_t loc, tree body, tree clauses)
110 tree stmt = make_node (OMP_TASKGROUP);
111 TREE_TYPE (stmt) = void_type_node;
112 OMP_TASKGROUP_BODY (stmt) = body;
113 OMP_TASKGROUP_CLAUSES (stmt) = clauses;
114 SET_EXPR_LOCATION (stmt, loc);
115 return add_stmt (stmt);
118 /* Complete a #pragma omp critical construct. BODY is the structured-block
119 that follows the pragma, NAME is the identifier in the pragma, or null
120 if it was omitted. LOC is the location of the #pragma. */
122 tree
123 c_finish_omp_critical (location_t loc, tree body, tree name, tree clauses)
125 gcc_assert (!clauses || OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_HINT);
126 if (name == NULL_TREE
127 && clauses != NULL_TREE
128 && integer_nonzerop (OMP_CLAUSE_HINT_EXPR (clauses)))
130 error_at (OMP_CLAUSE_LOCATION (clauses),
131 "%<#pragma omp critical%> with %<hint%> clause requires "
132 "a name, except when %<omp_sync_hint_none%> is used");
133 return error_mark_node;
136 tree stmt = make_node (OMP_CRITICAL);
137 TREE_TYPE (stmt) = void_type_node;
138 OMP_CRITICAL_BODY (stmt) = body;
139 OMP_CRITICAL_NAME (stmt) = name;
140 OMP_CRITICAL_CLAUSES (stmt) = clauses;
141 SET_EXPR_LOCATION (stmt, loc);
142 return add_stmt (stmt);
145 /* Complete a #pragma omp ordered construct. STMT is the structured-block
146 that follows the pragma. LOC is the location of the #pragma. */
148 tree
149 c_finish_omp_ordered (location_t loc, tree clauses, tree stmt)
151 tree t = make_node (OMP_ORDERED);
152 TREE_TYPE (t) = void_type_node;
153 OMP_ORDERED_BODY (t) = stmt;
154 if (!flag_openmp /* flag_openmp_simd */
155 && (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_SIMD
156 || OMP_CLAUSE_CHAIN (clauses)))
157 clauses = build_omp_clause (loc, OMP_CLAUSE_SIMD);
158 OMP_ORDERED_CLAUSES (t) = clauses;
159 SET_EXPR_LOCATION (t, loc);
160 return add_stmt (t);
164 /* Complete a #pragma omp barrier construct. LOC is the location of
165 the #pragma. */
167 void
168 c_finish_omp_barrier (location_t loc)
170 tree x;
172 x = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER);
173 x = build_call_expr_loc (loc, x, 0);
174 add_stmt (x);
178 /* Complete a #pragma omp taskwait construct. LOC is the location of the
179 pragma. */
181 void
182 c_finish_omp_taskwait (location_t loc)
184 tree x;
186 x = builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT);
187 x = build_call_expr_loc (loc, x, 0);
188 add_stmt (x);
192 /* Complete a #pragma omp taskyield construct. LOC is the location of the
193 pragma. */
195 void
196 c_finish_omp_taskyield (location_t loc)
198 tree x;
200 x = builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD);
201 x = build_call_expr_loc (loc, x, 0);
202 add_stmt (x);
206 /* Complete a #pragma omp atomic construct. For CODE OMP_ATOMIC
207 the expression to be implemented atomically is LHS opcode= RHS.
208 For OMP_ATOMIC_READ V = LHS, for OMP_ATOMIC_CAPTURE_{NEW,OLD} LHS
209 opcode= RHS with the new or old content of LHS returned.
210 LOC is the location of the atomic statement. The value returned
211 is either error_mark_node (if the construct was erroneous) or an
212 OMP_ATOMIC* node which should be added to the current statement
213 tree with add_stmt. If TEST is set, avoid calling save_expr
214 or create_tmp_var*. */
216 tree
217 c_finish_omp_atomic (location_t loc, enum tree_code code,
218 enum tree_code opcode, tree lhs, tree rhs,
219 tree v, tree lhs1, tree rhs1, tree r, bool swapped,
220 enum omp_memory_order memory_order, bool weak,
221 bool test)
223 tree x, type, addr, pre = NULL_TREE, rtmp = NULL_TREE, vtmp = NULL_TREE;
224 HOST_WIDE_INT bitpos = 0, bitsize = 0;
225 enum tree_code orig_opcode = opcode;
/* Any erroneous operand poisons the whole construct.  */
227 if (lhs == error_mark_node || rhs == error_mark_node
228 || v == error_mark_node || lhs1 == error_mark_node
229 || rhs1 == error_mark_node || r == error_mark_node)
230 return error_mark_node;
232 /* ??? According to one reading of the OpenMP spec, complex type are
233 supported, but there are no atomic stores for any architecture.
234 But at least icc 9.0 doesn't support complex types here either.
235 And lets not even talk about vector types... */
236 type = TREE_TYPE (lhs);
237 if (!INTEGRAL_TYPE_P (type)
238 && !POINTER_TYPE_P (type)
239 && !SCALAR_FLOAT_TYPE_P (type))
241 error_at (loc, "invalid expression type for %<#pragma omp atomic%>");
242 return error_mark_node;
244 if (TYPE_ATOMIC (type))
246 error_at (loc, "%<_Atomic%> expression in %<#pragma omp atomic%>");
247 return error_mark_node;
249 if (r && r != void_list_node && !INTEGRAL_TYPE_P (TREE_TYPE (r)))
251 error_at (loc, "%<#pragma omp atomic compare capture%> with non-integral "
252 "comparison result");
253 return error_mark_node;
256 if (opcode == RDIV_EXPR)
257 opcode = TRUNC_DIV_EXPR;
259 /* ??? Validate that rhs does not overlap lhs. */
/* If LHS is a C bit-field, operate on its representative field instead
   and record BITPOS/BITSIZE so the bits can be extracted from and
   reinserted into the representative around the atomic operation.  */
260 tree blhs = NULL;
261 if (TREE_CODE (lhs) == COMPONENT_REF
262 && TREE_CODE (TREE_OPERAND (lhs, 1)) == FIELD_DECL
263 && DECL_C_BIT_FIELD (TREE_OPERAND (lhs, 1))
264 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (lhs, 1)))
266 tree field = TREE_OPERAND (lhs, 1);
267 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
268 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
269 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
270 bitpos = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
271 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
272 else
273 bitpos = 0;
274 bitpos += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
275 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
276 gcc_assert (tree_fits_shwi_p (DECL_SIZE (field)));
277 bitsize = tree_to_shwi (DECL_SIZE (field));
278 blhs = lhs;
279 type = TREE_TYPE (repr);
280 lhs = build3 (COMPONENT_REF, TREE_TYPE (repr), TREE_OPERAND (lhs, 0),
281 repr, TREE_OPERAND (lhs, 2));
284 /* Take and save the address of the lhs. From then on we'll reference it
285 via indirection. */
286 addr = build_unary_op (loc, ADDR_EXPR, lhs, false);
287 if (addr == error_mark_node)
288 return error_mark_node;
289 if (!test)
290 addr = save_expr (addr);
291 if (!test
292 && TREE_CODE (addr) != SAVE_EXPR
293 && (TREE_CODE (addr) != ADDR_EXPR
294 || !VAR_P (TREE_OPERAND (addr, 0))))
296 /* Make sure LHS is simple enough so that goa_lhs_expr_p can recognize
297 it even after unsharing function body. */
298 tree var = create_tmp_var_raw (TREE_TYPE (addr));
299 DECL_CONTEXT (var) = current_function_decl;
300 addr = build4 (TARGET_EXPR, TREE_TYPE (addr), var, addr, NULL, NULL);
302 tree orig_lhs = lhs;
303 lhs = build_indirect_ref (loc, addr, RO_NULL);
304 tree new_lhs = lhs;
/* Atomic read: just V = LHS, extracting the bit-field if needed.  */
306 if (code == OMP_ATOMIC_READ)
308 x = build1 (OMP_ATOMIC_READ, type, addr);
309 SET_EXPR_LOCATION (x, loc);
310 OMP_ATOMIC_MEMORY_ORDER (x) = memory_order;
311 gcc_assert (!weak);
312 if (blhs)
313 x = build3_loc (loc, BIT_FIELD_REF, TREE_TYPE (blhs), x,
314 bitsize_int (bitsize), bitsize_int (bitpos));
315 return build_modify_expr (loc, v, NULL_TREE, NOP_EXPR,
316 loc, x, NULL_TREE);
319 /* There are lots of warnings, errors, and conversions that need to happen
320 in the course of interpreting a statement. Use the normal mechanisms
321 to do this, and then take it apart again. */
322 if (blhs)
324 lhs = build3_loc (loc, BIT_FIELD_REF, TREE_TYPE (blhs), lhs,
325 bitsize_int (bitsize), bitsize_int (bitpos));
/* For atomic compare (COND_EXPR) build the LHS == RHS1 comparison;
   otherwise fold the binary OPCODE into RHS so that only a plain
   NOP_EXPR store remains below.  */
326 if (opcode == COND_EXPR)
328 bool save = in_late_binary_op;
329 in_late_binary_op = true;
330 std::swap (rhs, rhs1);
331 rhs1 = build_binary_op (loc, EQ_EXPR, lhs, rhs1, true);
332 in_late_binary_op = save;
334 else if (swapped)
335 rhs = build_binary_op (loc, opcode, rhs, lhs, true);
336 else if (opcode != NOP_EXPR)
337 rhs = build_binary_op (loc, opcode, lhs, rhs, true);
338 opcode = NOP_EXPR;
340 else if (opcode == COND_EXPR)
342 bool save = in_late_binary_op;
343 in_late_binary_op = true;
344 std::swap (rhs, rhs1);
345 rhs1 = build_binary_op (loc, EQ_EXPR, lhs, rhs1, true);
346 in_late_binary_op = save;
347 opcode = NOP_EXPR;
349 else if (swapped)
351 rhs = build_binary_op (loc, opcode, rhs, lhs, true);
352 opcode = NOP_EXPR;
354 bool save = in_late_binary_op;
355 in_late_binary_op = true;
356 if ((opcode == MIN_EXPR || opcode == MAX_EXPR)
357 && build_binary_op (loc, LT_EXPR, blhs ? blhs : lhs, rhs,
358 true) == error_mark_node)
359 x = error_mark_node;
360 else
361 x = build_modify_expr (loc, blhs ? blhs : lhs, NULL_TREE, opcode,
362 loc, rhs, NULL_TREE);
363 in_late_binary_op = save;
364 if (x == error_mark_node)
365 return error_mark_node;
366 if (TREE_CODE (x) == COMPOUND_EXPR)
368 pre = TREE_OPERAND (x, 0);
369 gcc_assert (TREE_CODE (pre) == SAVE_EXPR || tree_invariant_p (pre));
370 x = TREE_OPERAND (x, 1);
372 gcc_assert (TREE_CODE (x) == MODIFY_EXPR);
373 rhs = TREE_OPERAND (x, 1);
375 if (blhs)
376 rhs = build3_loc (loc, BIT_INSERT_EXPR, type, new_lhs,
377 rhs, bitsize_int (bitpos));
/* Atomic compare: massage the comparison (bitwise via VIEW_CONVERT or
   memcmp for floating-point types that may contain padding bits) and,
   when a compare result R is captured, route it through RTMP.  */
378 if (orig_opcode == COND_EXPR)
380 if (error_operand_p (rhs1))
381 return error_mark_node;
382 gcc_assert (TREE_CODE (rhs1) == EQ_EXPR);
383 tree cmptype = TREE_TYPE (TREE_OPERAND (rhs1, 0));
384 if (SCALAR_FLOAT_TYPE_P (cmptype) && !test)
386 bool clear_padding = false;
387 HOST_WIDE_INT non_padding_start = 0;
388 HOST_WIDE_INT non_padding_end = 0;
389 if (BITS_PER_UNIT == 8
390 && CHAR_BIT == 8
391 && clear_padding_type_may_have_padding_p (cmptype))
393 HOST_WIDE_INT sz = int_size_in_bytes (cmptype), i;
394 gcc_assert (sz > 0);
395 unsigned char *buf = XALLOCAVEC (unsigned char, sz);
396 memset (buf, ~0, sz);
397 clear_type_padding_in_mask (cmptype, buf);
398 for (i = 0; i < sz; i++)
399 if (buf[i] != (unsigned char) ~0)
401 clear_padding = true;
402 break;
404 if (clear_padding && buf[i] == 0)
406 /* Try to optimize. In the common case where
407 non-padding bits are all continuous and start
408 and end at a byte boundary, we can just adjust
409 the memcmp call arguments and don't need to
410 emit __builtin_clear_padding calls. */
411 if (i == 0)
413 for (i = 0; i < sz; i++)
414 if (buf[i] != 0)
415 break;
416 if (i < sz && buf[i] == (unsigned char) ~0)
418 non_padding_start = i;
419 for (; i < sz; i++)
420 if (buf[i] != (unsigned char) ~0)
421 break;
423 else
424 i = 0;
426 if (i != 0)
428 non_padding_end = i;
429 for (; i < sz; i++)
430 if (buf[i] != 0)
432 non_padding_start = 0;
433 non_padding_end = 0;
434 break;
439 tree inttype = NULL_TREE;
440 if (!clear_padding && tree_fits_uhwi_p (TYPE_SIZE (cmptype)))
442 HOST_WIDE_INT prec = tree_to_uhwi (TYPE_SIZE (cmptype));
443 inttype = c_common_type_for_size (prec, 1);
444 if (inttype
445 && (!tree_int_cst_equal (TYPE_SIZE (cmptype),
446 TYPE_SIZE (inttype))
447 || TYPE_PRECISION (inttype) != prec))
448 inttype = NULL_TREE;
450 if (inttype)
452 TREE_OPERAND (rhs1, 0)
453 = build1_loc (loc, VIEW_CONVERT_EXPR, inttype,
454 TREE_OPERAND (rhs1, 0));
455 TREE_OPERAND (rhs1, 1)
456 = build1_loc (loc, VIEW_CONVERT_EXPR, inttype,
457 TREE_OPERAND (rhs1, 1));
459 else
461 tree pcmptype = build_pointer_type (cmptype);
462 tree tmp1 = create_tmp_var_raw (cmptype);
463 TREE_ADDRESSABLE (tmp1) = 1;
464 DECL_CONTEXT (tmp1) = current_function_decl;
465 tmp1 = build4 (TARGET_EXPR, cmptype, tmp1,
466 TREE_OPERAND (rhs1, 0), NULL, NULL);
467 tmp1 = build1 (ADDR_EXPR, pcmptype, tmp1);
468 tree tmp2 = create_tmp_var_raw (cmptype);
469 TREE_ADDRESSABLE (tmp2) = 1;
470 DECL_CONTEXT (tmp2) = current_function_decl;
471 tmp2 = build4 (TARGET_EXPR, cmptype, tmp2,
472 TREE_OPERAND (rhs1, 1), NULL, NULL);
473 tmp2 = build1 (ADDR_EXPR, pcmptype, tmp2);
474 if (non_padding_start)
476 tmp1 = build2 (POINTER_PLUS_EXPR, pcmptype, tmp1,
477 size_int (non_padding_start));
478 tmp2 = build2 (POINTER_PLUS_EXPR, pcmptype, tmp2,
479 size_int (non_padding_start));
481 tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
482 rhs1 = build_call_expr_loc (loc, fndecl, 3, tmp1, tmp2,
483 non_padding_end
484 ? size_int (non_padding_end
485 - non_padding_start)
486 : TYPE_SIZE_UNIT (cmptype));
487 rhs1 = build2 (EQ_EXPR, boolean_type_node, rhs1,
488 integer_zero_node);
489 if (clear_padding && non_padding_end == 0)
491 fndecl = builtin_decl_explicit (BUILT_IN_CLEAR_PADDING);
492 tree cp1 = build_call_expr_loc (loc, fndecl, 1, tmp1);
493 tree cp2 = build_call_expr_loc (loc, fndecl, 1, tmp2);
494 rhs1 = omit_two_operands_loc (loc, boolean_type_node,
495 rhs1, cp2, cp1);
499 if (r && test)
500 rtmp = rhs1;
501 else if (r)
503 tree var = create_tmp_var_raw (boolean_type_node);
504 DECL_CONTEXT (var) = current_function_decl;
505 rtmp = build4 (TARGET_EXPR, boolean_type_node, var,
506 boolean_false_node, NULL, NULL);
507 save = in_late_binary_op;
508 in_late_binary_op = true;
509 x = build_modify_expr (loc, var, NULL_TREE, NOP_EXPR,
510 loc, rhs1, NULL_TREE);
511 in_late_binary_op = save;
512 if (x == error_mark_node)
513 return error_mark_node;
514 gcc_assert (TREE_CODE (x) == MODIFY_EXPR
515 && TREE_OPERAND (x, 0) == var)
516 TREE_OPERAND (x, 0) = rtmp;
517 rhs1 = omit_one_operand_loc (loc, boolean_type_node, x, rtmp);
519 rhs = build3_loc (loc, COND_EXPR, type, rhs1, rhs, new_lhs);
520 rhs1 = NULL_TREE;
523 /* Punt the actual generation of atomic operations to common code. */
524 if (code == OMP_ATOMIC)
525 type = void_type_node;
526 x = build2 (code, type, addr, rhs);
527 SET_EXPR_LOCATION (x, loc);
528 OMP_ATOMIC_MEMORY_ORDER (x) = memory_order;
529 OMP_ATOMIC_WEAK (x) = weak;
531 /* Generally it is hard to prove lhs1 and lhs are the same memory
532 location, just diagnose different variables. */
533 if (rhs1
534 && VAR_P (rhs1)
535 && VAR_P (orig_lhs)
536 && rhs1 != orig_lhs
537 && !test)
539 if (code == OMP_ATOMIC)
540 error_at (loc, "%<#pragma omp atomic update%> uses two different "
541 "variables for memory");
542 else
543 error_at (loc, "%<#pragma omp atomic capture%> uses two different "
544 "variables for memory");
545 return error_mark_node;
/* Map bit-field LHS1/RHS1 to their representative fields, like LHS
   above, so the same-location comparisons below are meaningful.  */
548 if (lhs1
549 && lhs1 != orig_lhs
550 && TREE_CODE (lhs1) == COMPONENT_REF
551 && TREE_CODE (TREE_OPERAND (lhs1, 1)) == FIELD_DECL
552 && DECL_C_BIT_FIELD (TREE_OPERAND (lhs1, 1))
553 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (lhs1, 1)))
555 tree field = TREE_OPERAND (lhs1, 1);
556 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
557 lhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr), TREE_OPERAND (lhs1, 0),
558 repr, TREE_OPERAND (lhs1, 2));
560 if (rhs1
561 && rhs1 != orig_lhs
562 && TREE_CODE (rhs1) == COMPONENT_REF
563 && TREE_CODE (TREE_OPERAND (rhs1, 1)) == FIELD_DECL
564 && DECL_C_BIT_FIELD (TREE_OPERAND (rhs1, 1))
565 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (rhs1, 1)))
567 tree field = TREE_OPERAND (rhs1, 1);
568 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
569 rhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr), TREE_OPERAND (rhs1, 0),
570 repr, TREE_OPERAND (rhs1, 2));
/* For the capture forms, store the old or new value of LHS into V,
   going through a temporary VTMP when a compare result R is also
   produced (the store is then conditional on RTMP below).  */
573 if (code != OMP_ATOMIC)
575 /* Generally it is hard to prove lhs1 and lhs are the same memory
576 location, just diagnose different variables. */
577 if (lhs1 && VAR_P (lhs1) && VAR_P (orig_lhs))
579 if (lhs1 != orig_lhs && !test)
581 error_at (loc, "%<#pragma omp atomic capture%> uses two "
582 "different variables for memory");
583 return error_mark_node;
586 if (blhs)
587 x = build3_loc (loc, BIT_FIELD_REF, TREE_TYPE (blhs), x,
588 bitsize_int (bitsize), bitsize_int (bitpos));
589 if (r && !test)
591 vtmp = create_tmp_var_raw (TREE_TYPE (x));
592 DECL_CONTEXT (vtmp) = current_function_decl;
594 else
595 vtmp = v;
596 x = build_modify_expr (loc, vtmp, NULL_TREE, NOP_EXPR,
597 loc, x, NULL_TREE);
598 if (x == error_mark_node)
599 return error_mark_node;
600 type = TREE_TYPE (x);
601 if (r && !test)
603 vtmp = build4 (TARGET_EXPR, TREE_TYPE (vtmp), vtmp,
604 build_zero_cst (TREE_TYPE (vtmp)), NULL, NULL);
605 gcc_assert (TREE_CODE (x) == MODIFY_EXPR
606 && TREE_OPERAND (x, 0) == TARGET_EXPR_SLOT (vtmp));
607 TREE_OPERAND (x, 0) = vtmp;
609 if (rhs1 && rhs1 != orig_lhs)
611 tree rhs1addr = build_unary_op (loc, ADDR_EXPR, rhs1, false);
612 if (rhs1addr == error_mark_node)
613 return error_mark_node;
614 x = omit_one_operand_loc (loc, type, x, rhs1addr);
616 if (lhs1 && lhs1 != orig_lhs)
618 tree lhs1addr = build_unary_op (loc, ADDR_EXPR, lhs1, false);
619 if (lhs1addr == error_mark_node)
620 return error_mark_node;
621 if (code == OMP_ATOMIC_CAPTURE_OLD)
622 x = omit_one_operand_loc (loc, type, x, lhs1addr);
623 else
625 if (!test)
626 x = save_expr (x);
627 x = omit_two_operands_loc (loc, type, x, x, lhs1addr);
631 else if (rhs1 && rhs1 != orig_lhs)
633 tree rhs1addr = build_unary_op (loc, ADDR_EXPR, rhs1, false);
634 if (rhs1addr == error_mark_node)
635 return error_mark_node;
636 x = omit_one_operand_loc (loc, type, x, rhs1addr);
639 if (pre)
640 x = omit_one_operand_loc (loc, type, x, pre);
641 if (r && r != void_list_node)
643 in_late_binary_op = true;
644 tree x2 = build_modify_expr (loc, r, NULL_TREE, NOP_EXPR,
645 loc, rtmp, NULL_TREE);
646 in_late_binary_op = save;
647 if (x2 == error_mark_node)
648 return error_mark_node;
649 x = omit_one_operand_loc (loc, TREE_TYPE (x2), x2, x);
651 if (v && vtmp != v)
653 in_late_binary_op = true;
654 tree x2 = build_modify_expr (loc, v, NULL_TREE, NOP_EXPR,
655 loc, vtmp, NULL_TREE);
656 in_late_binary_op = save;
657 if (x2 == error_mark_node)
658 return error_mark_node;
659 x2 = build3_loc (loc, COND_EXPR, void_type_node, rtmp,
660 void_node, x2);
661 x = omit_one_operand_loc (loc, TREE_TYPE (x2), x2, x);
663 return x;
667 /* Return true if TYPE is the implementation's omp_depend_t. */
669 bool
670 c_omp_depend_t_p (tree type)
672 type = TYPE_MAIN_VARIANT (type);
673 return (TREE_CODE (type) == RECORD_TYPE
674 && TYPE_NAME (type)
675 && ((TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
676 ? DECL_NAME (TYPE_NAME (type)) : TYPE_NAME (type))
677 == get_identifier ("omp_depend_t"))
678 && TYPE_FILE_SCOPE_P (type)
679 && COMPLETE_TYPE_P (type)
680 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
681 && !compare_tree_int (TYPE_SIZE (type),
682 2 * tree_to_uhwi (TYPE_SIZE (ptr_type_node))));
686 /* Complete a #pragma omp depobj construct. LOC is the location of the
687 #pragma. */
689 void
690 c_finish_omp_depobj (location_t loc, tree depobj,
691 enum omp_clause_depend_kind kind, tree clause)
693 tree t = NULL_TREE;
/* Validate the depobj expression: it must be a non-const object of the
   implementation's omp_depend_t type.  */
694 if (!error_operand_p (depobj))
696 if (!c_omp_depend_t_p (TREE_TYPE (depobj)))
698 error_at (EXPR_LOC_OR_LOC (depobj, loc),
699 "type of %<depobj%> expression is not %<omp_depend_t%>");
700 depobj = error_mark_node;
702 else if (TYPE_READONLY (TREE_TYPE (depobj)))
704 error_at (EXPR_LOC_OR_LOC (depobj, loc),
705 "%<const%> qualified %<depobj%> expression");
706 depobj = error_mark_node;
709 else
710 depobj = error_mark_node;
712 if (clause == error_mark_node)
713 return;
/* Validate the depend clause (if any) and compute T, the address of
   the dependence, and KIND, the dependence kind to store.  */
715 if (clause)
717 gcc_assert (TREE_CODE (clause) == OMP_CLAUSE);
718 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_DOACROSS)
720 error_at (OMP_CLAUSE_LOCATION (clause),
721 "%<depend(%s)%> is only allowed in %<omp ordered%>",
722 OMP_CLAUSE_DOACROSS_KIND (clause)
723 == OMP_CLAUSE_DOACROSS_SOURCE
724 ? "source" : "sink");
725 return;
727 gcc_assert (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_DEPEND);
728 if (OMP_CLAUSE_CHAIN (clause))
729 error_at (OMP_CLAUSE_LOCATION (clause),
730 "more than one locator in %<depend%> clause on %<depobj%> "
731 "construct");
732 switch (OMP_CLAUSE_DEPEND_KIND (clause))
734 case OMP_CLAUSE_DEPEND_DEPOBJ:
735 error_at (OMP_CLAUSE_LOCATION (clause),
736 "%<depobj%> dependence type specified in %<depend%> "
737 "clause on %<depobj%> construct");
738 return;
739 case OMP_CLAUSE_DEPEND_IN:
740 case OMP_CLAUSE_DEPEND_OUT:
741 case OMP_CLAUSE_DEPEND_INOUT:
742 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
743 case OMP_CLAUSE_DEPEND_INOUTSET:
744 kind = OMP_CLAUSE_DEPEND_KIND (clause);
745 t = OMP_CLAUSE_DECL (clause);
746 gcc_assert (t);
747 if (TREE_CODE (t) == TREE_LIST
748 && TREE_PURPOSE (t)
749 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
751 error_at (OMP_CLAUSE_LOCATION (clause),
752 "%<iterator%> modifier may not be specified on "
753 "%<depobj%> construct");
754 return;
756 if (TREE_CODE (t) == COMPOUND_EXPR)
758 tree t1 = build_fold_addr_expr (TREE_OPERAND (t, 1));
759 t = build2 (COMPOUND_EXPR, TREE_TYPE (t1), TREE_OPERAND (t, 0),
760 t1);
762 else if (t != null_pointer_node)
763 t = build_fold_addr_expr (t);
764 break;
765 default:
766 gcc_unreachable ();
769 else
770 gcc_assert (kind != OMP_CLAUSE_DEPEND_INVALID);
772 if (depobj == error_mark_node)
773 return;
/* Treat the omp_depend_t object as an array of two pointers: store the
   dependence address in the first slot (when updating with a clause)
   and the GOMP_DEPEND_* kind in the second.  */
775 depobj = build_fold_addr_expr_loc (EXPR_LOC_OR_LOC (depobj, loc), depobj);
776 tree dtype
777 = build_pointer_type_for_mode (ptr_type_node, TYPE_MODE (ptr_type_node),
778 true);
779 depobj = fold_convert (dtype, depobj);
780 tree r;
781 if (clause)
783 depobj = save_expr (depobj);
784 r = build_indirect_ref (loc, depobj, RO_UNARY_STAR);
785 add_stmt (build2 (MODIFY_EXPR, void_type_node, r, t));
787 int k;
788 switch (kind)
790 case OMP_CLAUSE_DEPEND_IN:
791 k = GOMP_DEPEND_IN;
792 break;
793 case OMP_CLAUSE_DEPEND_OUT:
794 k = GOMP_DEPEND_OUT;
795 break;
796 case OMP_CLAUSE_DEPEND_INOUT:
797 k = GOMP_DEPEND_INOUT;
798 break;
799 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
800 k = GOMP_DEPEND_MUTEXINOUTSET;
801 break;
802 case OMP_CLAUSE_DEPEND_INOUTSET:
803 k = GOMP_DEPEND_INOUTSET;
804 break;
805 case OMP_CLAUSE_DEPEND_LAST:
806 k = -1;
807 break;
808 default:
809 gcc_unreachable ();
811 t = build_int_cst (ptr_type_node, k);
812 depobj = build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (depobj), depobj,
813 TYPE_SIZE_UNIT (ptr_type_node));
814 r = build_indirect_ref (loc, depobj, RO_UNARY_STAR);
815 add_stmt (build2 (MODIFY_EXPR, void_type_node, r, t));
819 /* Complete a #pragma omp flush construct. We don't do anything with
820 the variable list that the syntax allows. LOC is the location of
821 the #pragma. */
823 void
824 c_finish_omp_flush (location_t loc, int mo)
826 tree x;
828 if (mo == MEMMODEL_LAST || mo == MEMMODEL_SEQ_CST)
830 x = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
831 x = build_call_expr_loc (loc, x, 0);
833 else
835 x = builtin_decl_explicit (BUILT_IN_ATOMIC_THREAD_FENCE);
836 x = build_call_expr_loc (loc, x, 1,
837 build_int_cst (integer_type_node, mo));
839 add_stmt (x);
843 /* Check and canonicalize OMP_FOR increment expression.
844 Helper function for c_finish_omp_for. */
846 static tree
847 check_omp_for_incr_expr (location_t loc, tree exp, tree decl)
849 tree t;
851 if (!INTEGRAL_TYPE_P (TREE_TYPE (exp))
852 || TYPE_PRECISION (TREE_TYPE (exp)) < TYPE_PRECISION (TREE_TYPE (decl)))
853 return error_mark_node;
855 if (exp == decl)
856 return build_int_cst (TREE_TYPE (exp), 0);
858 switch (TREE_CODE (exp))
860 CASE_CONVERT:
861 t = check_omp_for_incr_expr (loc, TREE_OPERAND (exp, 0), decl);
862 if (t != error_mark_node)
863 return fold_convert_loc (loc, TREE_TYPE (exp), t);
864 break;
865 case MINUS_EXPR:
866 t = check_omp_for_incr_expr (loc, TREE_OPERAND (exp, 0), decl);
867 if (t != error_mark_node)
868 return fold_build2_loc (loc, MINUS_EXPR,
869 TREE_TYPE (exp), t, TREE_OPERAND (exp, 1));
870 break;
871 case PLUS_EXPR:
872 t = check_omp_for_incr_expr (loc, TREE_OPERAND (exp, 0), decl);
873 if (t != error_mark_node)
874 return fold_build2_loc (loc, PLUS_EXPR,
875 TREE_TYPE (exp), t, TREE_OPERAND (exp, 1));
876 t = check_omp_for_incr_expr (loc, TREE_OPERAND (exp, 1), decl);
877 if (t != error_mark_node)
878 return fold_build2_loc (loc, PLUS_EXPR,
879 TREE_TYPE (exp), TREE_OPERAND (exp, 0), t);
880 break;
881 case COMPOUND_EXPR:
883 /* cp_build_modify_expr forces preevaluation of the RHS to make
884 sure that it is evaluated before the lvalue-rvalue conversion
885 is applied to the LHS. Reconstruct the original expression. */
886 tree op0 = TREE_OPERAND (exp, 0);
887 if (TREE_CODE (op0) == TARGET_EXPR
888 && !VOID_TYPE_P (TREE_TYPE (op0)))
890 tree op1 = TREE_OPERAND (exp, 1);
891 tree temp = TARGET_EXPR_SLOT (op0);
892 if (BINARY_CLASS_P (op1)
893 && TREE_OPERAND (op1, 1) == temp)
895 op1 = copy_node (op1);
896 TREE_OPERAND (op1, 1) = TARGET_EXPR_INITIAL (op0);
897 return check_omp_for_incr_expr (loc, op1, decl);
900 break;
902 default:
903 break;
906 return error_mark_node;
909 /* If the OMP_FOR increment expression in INCR is of pointer type,
910 canonicalize it into an expression handled by gimplify_omp_for()
911 and return it. DECL is the iteration variable. */
913 static tree
914 c_omp_for_incr_canonicalize_ptr (location_t loc, tree decl, tree incr)
916 if (POINTER_TYPE_P (TREE_TYPE (decl))
917 && TREE_OPERAND (incr, 1))
919 tree t = fold_convert_loc (loc,
920 sizetype, TREE_OPERAND (incr, 1));
922 if (TREE_CODE (incr) == POSTDECREMENT_EXPR
923 || TREE_CODE (incr) == PREDECREMENT_EXPR)
924 t = fold_build1_loc (loc, NEGATE_EXPR, sizetype, t);
925 t = fold_build_pointer_plus (decl, t);
926 incr = build2 (MODIFY_EXPR, void_type_node, decl, t);
928 return incr;
931 /* Validate and generate OMP_FOR.
932 DECLV is a vector of iteration variables, for each collapsed loop.
934 ORIG_DECLV, if non-NULL, is a vector with the original iteration
935 variables (prior to any transformations, by say, C++ iterators).
937 INITV, CONDV and INCRV are vectors containing initialization
938 expressions, controlling predicates and increment expressions.
939 BODY is the body of the loop and PRE_BODY statements that go before
940 the loop. */
942 tree
943 c_finish_omp_for (location_t locus, enum tree_code code, tree declv,
944 tree orig_declv, tree initv, tree condv, tree incrv,
945 tree body, tree pre_body, bool final_p)
947 location_t elocus;
948 bool fail = false;
949 int i;
951 gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (initv));
952 gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (condv));
953 gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (incrv));
954 for (i = 0; i < TREE_VEC_LENGTH (declv); i++)
956 tree decl = TREE_VEC_ELT (declv, i);
957 tree init = TREE_VEC_ELT (initv, i);
958 tree cond = TREE_VEC_ELT (condv, i);
959 tree incr = TREE_VEC_ELT (incrv, i);
961 elocus = locus;
962 if (EXPR_HAS_LOCATION (init))
963 elocus = EXPR_LOCATION (init);
965 /* Validate the iteration variable. */
966 if (!INTEGRAL_TYPE_P (TREE_TYPE (decl))
967 && TREE_CODE (TREE_TYPE (decl)) != POINTER_TYPE)
969 error_at (elocus, "invalid type for iteration variable %qE", decl);
970 fail = true;
972 else if (TYPE_ATOMIC (TREE_TYPE (decl)))
974 error_at (elocus, "%<_Atomic%> iteration variable %qE", decl);
975 fail = true;
976 /* _Atomic iterator confuses stuff too much, so we risk ICE
977 trying to diagnose it further. */
978 continue;
981 /* In the case of "for (int i = 0...)", init will be a decl. It should
982 have a DECL_INITIAL that we can turn into an assignment. */
983 if (init == decl)
985 elocus = DECL_SOURCE_LOCATION (decl);
987 init = DECL_INITIAL (decl);
988 if (init == NULL)
990 error_at (elocus, "%qE is not initialized", decl);
991 init = integer_zero_node;
992 fail = true;
994 DECL_INITIAL (decl) = NULL_TREE;
996 init = build_modify_expr (elocus, decl, NULL_TREE, NOP_EXPR,
997 /* FIXME diagnostics: This should
998 be the location of the INIT. */
999 elocus,
1000 init,
1001 NULL_TREE);
1003 if (init != error_mark_node)
1005 gcc_assert (TREE_CODE (init) == MODIFY_EXPR);
1006 gcc_assert (TREE_OPERAND (init, 0) == decl);
1009 if (cond == NULL_TREE)
1011 error_at (elocus, "missing controlling predicate");
1012 fail = true;
1014 else
1016 bool cond_ok = false;
1018 /* E.g. C sizeof (vla) could add COMPOUND_EXPRs with
1019 evaluation of the vla VAR_DECL. We need to readd
1020 them to the non-decl operand. See PR45784. */
1021 while (TREE_CODE (cond) == COMPOUND_EXPR)
1022 cond = TREE_OPERAND (cond, 1);
1024 if (EXPR_HAS_LOCATION (cond))
1025 elocus = EXPR_LOCATION (cond);
1027 if (TREE_CODE (cond) == LT_EXPR
1028 || TREE_CODE (cond) == LE_EXPR
1029 || TREE_CODE (cond) == GT_EXPR
1030 || TREE_CODE (cond) == GE_EXPR
1031 || TREE_CODE (cond) == NE_EXPR
1032 || TREE_CODE (cond) == EQ_EXPR)
1034 tree op0 = TREE_OPERAND (cond, 0);
1035 tree op1 = TREE_OPERAND (cond, 1);
1037 /* 2.5.1. The comparison in the condition is computed in
1038 the type of DECL, otherwise the behavior is undefined.
1040 For example:
1041 long n; int i;
1042 i < n;
1044 according to ISO will be evaluated as:
1045 (long)i < n;
1047 We want to force:
1048 i < (int)n; */
1049 if (TREE_CODE (op0) == NOP_EXPR
1050 && decl == TREE_OPERAND (op0, 0))
1052 TREE_OPERAND (cond, 0) = TREE_OPERAND (op0, 0);
1053 TREE_OPERAND (cond, 1)
1054 = fold_build1_loc (elocus, NOP_EXPR, TREE_TYPE (decl),
1055 TREE_OPERAND (cond, 1));
1057 else if (TREE_CODE (op1) == NOP_EXPR
1058 && decl == TREE_OPERAND (op1, 0))
1060 TREE_OPERAND (cond, 1) = TREE_OPERAND (op1, 0);
1061 TREE_OPERAND (cond, 0)
1062 = fold_build1_loc (elocus, NOP_EXPR, TREE_TYPE (decl),
1063 TREE_OPERAND (cond, 0));
1066 if (decl == TREE_OPERAND (cond, 0))
1067 cond_ok = true;
1068 else if (decl == TREE_OPERAND (cond, 1))
1070 TREE_SET_CODE (cond,
1071 swap_tree_comparison (TREE_CODE (cond)));
1072 TREE_OPERAND (cond, 1) = TREE_OPERAND (cond, 0);
1073 TREE_OPERAND (cond, 0) = decl;
1074 cond_ok = true;
1077 if (TREE_CODE (cond) == NE_EXPR
1078 || TREE_CODE (cond) == EQ_EXPR)
1080 if (!INTEGRAL_TYPE_P (TREE_TYPE (decl)))
1082 if (code == OACC_LOOP || TREE_CODE (cond) == EQ_EXPR)
1083 cond_ok = false;
1085 else if (operand_equal_p (TREE_OPERAND (cond, 1),
1086 TYPE_MIN_VALUE (TREE_TYPE (decl)),
1088 TREE_SET_CODE (cond, TREE_CODE (cond) == NE_EXPR
1089 ? GT_EXPR : LE_EXPR);
1090 else if (operand_equal_p (TREE_OPERAND (cond, 1),
1091 TYPE_MAX_VALUE (TREE_TYPE (decl)),
1093 TREE_SET_CODE (cond, TREE_CODE (cond) == NE_EXPR
1094 ? LT_EXPR : GE_EXPR);
1095 else if (code == OACC_LOOP || TREE_CODE (cond) == EQ_EXPR)
1096 cond_ok = false;
1099 if (cond_ok && TREE_VEC_ELT (condv, i) != cond)
1101 tree ce = NULL_TREE, *pce = &ce;
1102 tree type = TREE_TYPE (TREE_OPERAND (cond, 1));
1103 for (tree c = TREE_VEC_ELT (condv, i); c != cond;
1104 c = TREE_OPERAND (c, 1))
1106 *pce = build2 (COMPOUND_EXPR, type, TREE_OPERAND (c, 0),
1107 TREE_OPERAND (cond, 1));
1108 pce = &TREE_OPERAND (*pce, 1);
1110 TREE_OPERAND (cond, 1) = ce;
1111 TREE_VEC_ELT (condv, i) = cond;
1115 if (!cond_ok)
1117 error_at (elocus, "invalid controlling predicate");
1118 fail = true;
1122 if (incr == NULL_TREE)
1124 error_at (elocus, "missing increment expression");
1125 fail = true;
1127 else
1129 bool incr_ok = false;
1131 if (EXPR_HAS_LOCATION (incr))
1132 elocus = EXPR_LOCATION (incr);
1134 /* Check all the valid increment expressions: v++, v--, ++v, --v,
1135 v = v + incr, v = incr + v and v = v - incr. */
1136 switch (TREE_CODE (incr))
1138 case POSTINCREMENT_EXPR:
1139 case PREINCREMENT_EXPR:
1140 case POSTDECREMENT_EXPR:
1141 case PREDECREMENT_EXPR:
1142 if (TREE_OPERAND (incr, 0) != decl)
1143 break;
1145 incr_ok = true;
1146 if (!fail
1147 && TREE_CODE (cond) == NE_EXPR
1148 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
1149 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
1150 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
1151 != INTEGER_CST))
1153 /* For pointer to VLA, transform != into < or >
1154 depending on whether incr is increment or decrement. */
1155 if (TREE_CODE (incr) == PREINCREMENT_EXPR
1156 || TREE_CODE (incr) == POSTINCREMENT_EXPR)
1157 TREE_SET_CODE (cond, LT_EXPR);
1158 else
1159 TREE_SET_CODE (cond, GT_EXPR);
1161 incr = c_omp_for_incr_canonicalize_ptr (elocus, decl, incr);
1162 break;
1164 case COMPOUND_EXPR:
1165 if (TREE_CODE (TREE_OPERAND (incr, 0)) != SAVE_EXPR
1166 || TREE_CODE (TREE_OPERAND (incr, 1)) != MODIFY_EXPR)
1167 break;
1168 incr = TREE_OPERAND (incr, 1);
1169 /* FALLTHRU */
1170 case MODIFY_EXPR:
1171 if (TREE_OPERAND (incr, 0) != decl)
1172 break;
1173 if (TREE_OPERAND (incr, 1) == decl)
1174 break;
1175 if (TREE_CODE (TREE_OPERAND (incr, 1)) == PLUS_EXPR
1176 && (TREE_OPERAND (TREE_OPERAND (incr, 1), 0) == decl
1177 || TREE_OPERAND (TREE_OPERAND (incr, 1), 1) == decl))
1178 incr_ok = true;
1179 else if ((TREE_CODE (TREE_OPERAND (incr, 1)) == MINUS_EXPR
1180 || (TREE_CODE (TREE_OPERAND (incr, 1))
1181 == POINTER_PLUS_EXPR))
1182 && TREE_OPERAND (TREE_OPERAND (incr, 1), 0) == decl)
1183 incr_ok = true;
1184 else
1186 tree t = check_omp_for_incr_expr (elocus,
1187 TREE_OPERAND (incr, 1),
1188 decl);
1189 if (t != error_mark_node)
1191 incr_ok = true;
1192 t = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, t);
1193 incr = build2 (MODIFY_EXPR, void_type_node, decl, t);
1196 if (!fail
1197 && incr_ok
1198 && TREE_CODE (cond) == NE_EXPR)
1200 tree i = TREE_OPERAND (incr, 1);
1201 i = TREE_OPERAND (i, TREE_OPERAND (i, 0) == decl);
1202 i = c_fully_fold (i, false, NULL);
1203 if (!final_p
1204 && TREE_CODE (i) != INTEGER_CST)
1206 else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
1208 tree unit
1209 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
1210 if (unit)
1212 enum tree_code ccode = GT_EXPR;
1213 unit = c_fully_fold (unit, false, NULL);
1214 i = fold_convert (TREE_TYPE (unit), i);
1215 if (operand_equal_p (unit, i, 0))
1216 ccode = LT_EXPR;
1217 if (ccode == GT_EXPR)
1219 i = fold_unary (NEGATE_EXPR, TREE_TYPE (i), i);
1220 if (i == NULL_TREE
1221 || !operand_equal_p (unit, i, 0))
1223 error_at (elocus,
1224 "increment is not constant 1 or "
1225 "-1 for %<!=%> condition");
1226 fail = true;
1229 if (TREE_CODE (unit) != INTEGER_CST)
1230 /* For pointer to VLA, transform != into < or >
1231 depending on whether the pointer is
1232 incremented or decremented in each
1233 iteration. */
1234 TREE_SET_CODE (cond, ccode);
1237 else
1239 if (!integer_onep (i) && !integer_minus_onep (i))
1241 error_at (elocus,
1242 "increment is not constant 1 or -1 for"
1243 " %<!=%> condition");
1244 fail = true;
1248 break;
1250 default:
1251 break;
1253 if (!incr_ok)
1255 error_at (elocus, "invalid increment expression");
1256 fail = true;
1260 TREE_VEC_ELT (initv, i) = init;
1261 TREE_VEC_ELT (incrv, i) = incr;
1264 if (fail)
1265 return NULL;
1266 else
1268 tree t = make_node (code);
1270 TREE_TYPE (t) = void_type_node;
1271 OMP_FOR_INIT (t) = initv;
1272 OMP_FOR_COND (t) = condv;
1273 OMP_FOR_INCR (t) = incrv;
1274 OMP_FOR_BODY (t) = body;
1275 OMP_FOR_PRE_BODY (t) = pre_body;
1276 OMP_FOR_ORIG_DECLS (t) = orig_declv;
1278 SET_EXPR_LOCATION (t, locus);
1279 return t;
1283 /* Type for passing data in between c_omp_check_loop_iv and
1284 c_omp_check_loop_iv_r. */
struct c_omp_check_loop_iv_data
{
  /* TREE_VEC of the iteration variables of the loop nest; elements may
     also be TREE_LISTs whose TREE_PURPOSE is the decl (see
     c_omp_is_loop_iterator).  */
  tree declv;
  /* Set once an invalid reference has been diagnosed.  */
  bool fail;
  /* Set by c_omp_check_loop_iv_r when an expression might be a valid
     non-rectangular reference to an outer iteration variable; resolved
     later by c_omp_check_nonrect_loop_iv.  */
  bool maybe_nonrect;
  /* Location of the OMP construct, used as fallback for diagnostics.  */
  location_t stmt_loc;
  /* Location of the expression currently being walked.  */
  location_t expr_loc;
  /* Bits 0-1 select the diagnostic: 0 initializer, 1 condition,
     2 increment.  Bit 2 set means outer-iterator references may be
     valid (non-rectangular loop nest handling).  */
  int kind;
  /* Index of the loop in the nest whose expression is being walked.  */
  int idx;
  /* Language-specific walk_tree helper hook.  */
  walk_tree_lh lh;
  /* Pointer set used to avoid diagnosing the same tree twice and to
     prune repeated subtrees during the walk.  */
  hash_set<tree> *ppset;
};
1299 /* Return -1 if DECL is not a loop iterator in loop nest D, otherwise
1300 return the index of the loop in which it is an iterator.
1301 Return TREE_VEC_LENGTH (d->declv) if it is a C++ range for iterator. */
static int
c_omp_is_loop_iterator (tree decl, struct c_omp_check_loop_iv_data *d)
{
  for (int i = 0; i < TREE_VEC_LENGTH (d->declv); i++)
    /* Match either the iteration variable itself, or the TREE_PURPOSE
       of a TREE_LIST entry (used e.g. for C++ class iterators).  */
    if (decl == TREE_VEC_ELT (d->declv, i)
        || (TREE_CODE (d->declv, i) == TREE_LIST
            && decl == TREE_PURPOSE (TREE_VEC_ELT (d->declv, i))))
      return i;
    else if (TREE_CODE (TREE_VEC_ELT (d->declv, i)) == TREE_LIST
             && TREE_CHAIN (TREE_VEC_ELT (d->declv, i))
             && (TREE_CODE (TREE_CHAIN (TREE_VEC_ELT (d->declv, i)))
                 == TREE_VEC))
      /* TREE_LIST entries with a TREE_VEC chain carry extra decls from
         slot 2 onwards (C++ range for); a match there is reported as
         TREE_VEC_LENGTH (d->declv), i.e. not any real loop index.  */
      for (int j = 2;
           j < TREE_VEC_LENGTH (TREE_CHAIN (TREE_VEC_ELT (d->declv, i))); j++)
        if (decl == TREE_VEC_ELT (TREE_CHAIN (TREE_VEC_ELT (d->declv, i)), j))
          return TREE_VEC_LENGTH (d->declv);
  return -1;
}
1322 /* Helper function called via walk_tree, to diagnose uses
1323 of associated loop IVs inside of lb, b and incr expressions
1324 of OpenMP loops. */
static tree
c_omp_check_loop_iv_r (tree *tp, int *walk_subtrees, void *data)
{
  struct c_omp_check_loop_iv_data *d
    = (struct c_omp_check_loop_iv_data *) data;
  if (DECL_P (*tp))
    {
      int idx = c_omp_is_loop_iterator (*tp, d);
      if (idx == -1)
	return NULL_TREE;

      /* With bit 2 of kind set, a reference to an iterator of an
	 *outer* loop may be a valid non-rectangular lb/b; just record
	 the possibility and let c_omp_check_nonrect_loop_iv decide.  */
      if ((d->kind & 4) && idx < d->idx)
	{
	  d->maybe_nonrect = true;
	  return NULL_TREE;
	}

      /* Diagnose each decl at most once.  */
      if (d->ppset->add (*tp))
	return NULL_TREE;

      location_t loc = d->expr_loc;
      if (loc == UNKNOWN_LOCATION)
	loc = d->stmt_loc;

      /* Bits 0-1 of kind tell which expression is being walked.  */
      switch (d->kind & 3)
	{
	case 0:
	  error_at (loc, "initializer expression refers to "
			 "iteration variable %qD", *tp);
	  break;
	case 1:
	  error_at (loc, "condition expression refers to "
			 "iteration variable %qD", *tp);
	  break;
	case 2:
	  error_at (loc, "increment expression refers to "
			 "iteration variable %qD", *tp);
	  break;
	}
      d->fail = true;
    }
  else if ((d->kind & 4)
	   && TREE_CODE (*tp) != TREE_VEC
	   && TREE_CODE (*tp) != PLUS_EXPR
	   && TREE_CODE (*tp) != MINUS_EXPR
	   && TREE_CODE (*tp) != MULT_EXPR
	   && TREE_CODE (*tp) != POINTER_PLUS_EXPR
	   && !CONVERT_EXPR_P (*tp))
    {
      /* Any tree code outside the forms that can make up a valid
	 non-rectangular expression: rewalk this subtree with bit 2
	 cleared so outer-iterator references inside it are diagnosed,
	 then restore the bit.  */
      *walk_subtrees = 0;
      d->kind &= 3;
      walk_tree_1 (tp, c_omp_check_loop_iv_r, data, NULL, d->lh);
      d->kind |= 4;
      return NULL_TREE;
    }
  else if (d->ppset->add (*tp))
    *walk_subtrees = 0;
  /* Don't walk dtors added by C++ wrap_cleanups_r.  */
  else if (TREE_CODE (*tp) == TRY_CATCH_EXPR
	   && TRY_CATCH_IS_CLEANUP (*tp))
    {
      *walk_subtrees = 0;
      return walk_tree_1 (&TREE_OPERAND (*tp, 0), c_omp_check_loop_iv_r, data,
			  NULL, d->lh);
    }

  return NULL_TREE;
}
1395 /* Check the allowed expressions for non-rectangular loop nest lb and b
1396 expressions. Return the outer var decl referenced in the expression. */
1398 static tree
1399 c_omp_check_nonrect_loop_iv (tree *tp, struct c_omp_check_loop_iv_data *d,
1400 walk_tree_lh lh)
1402 d->maybe_nonrect = false;
1403 if (d->fail)
1404 return NULL_TREE;
1406 hash_set<tree> pset;
1407 hash_set<tree> *ppset = d->ppset;
1408 d->ppset = &pset;
1410 tree t = *tp;
1411 if (TREE_CODE (t) == TREE_VEC
1412 && TREE_VEC_LENGTH (t) == 3
1413 && DECL_P (TREE_VEC_ELT (t, 0))
1414 && c_omp_is_loop_iterator (TREE_VEC_ELT (t, 0), d) >= 0)
1416 d->kind &= 3;
1417 walk_tree_1 (&TREE_VEC_ELT (t, 1), c_omp_check_loop_iv_r, d, NULL, lh);
1418 walk_tree_1 (&TREE_VEC_ELT (t, 1), c_omp_check_loop_iv_r, d, NULL, lh);
1419 d->ppset = ppset;
1420 return d->fail ? NULL_TREE : TREE_VEC_ELT (t, 0);
1423 while (CONVERT_EXPR_P (t))
1424 t = TREE_OPERAND (t, 0);
1426 tree a1 = t, a2 = integer_zero_node;
1427 bool neg_a1 = false, neg_a2 = false;
1428 switch (TREE_CODE (t))
1430 case PLUS_EXPR:
1431 case MINUS_EXPR:
1432 a1 = TREE_OPERAND (t, 0);
1433 a2 = TREE_OPERAND (t, 1);
1434 while (CONVERT_EXPR_P (a1))
1435 a1 = TREE_OPERAND (a1, 0);
1436 while (CONVERT_EXPR_P (a2))
1437 a2 = TREE_OPERAND (a2, 0);
1438 if (DECL_P (a1) && c_omp_is_loop_iterator (a1, d) >= 0)
1440 a2 = TREE_OPERAND (t, 1);
1441 if (TREE_CODE (t) == MINUS_EXPR)
1442 neg_a2 = true;
1443 t = a1;
1444 break;
1446 if (DECL_P (a2) && c_omp_is_loop_iterator (a2, d) >= 0)
1448 a1 = TREE_OPERAND (t, 0);
1449 if (TREE_CODE (t) == MINUS_EXPR)
1450 neg_a1 = true;
1451 t = a2;
1452 a2 = a1;
1453 break;
1455 if (TREE_CODE (a1) == MULT_EXPR && TREE_CODE (a2) == MULT_EXPR)
1457 tree o1 = TREE_OPERAND (a1, 0);
1458 tree o2 = TREE_OPERAND (a1, 1);
1459 while (CONVERT_EXPR_P (o1))
1460 o1 = TREE_OPERAND (o1, 0);
1461 while (CONVERT_EXPR_P (o2))
1462 o2 = TREE_OPERAND (o2, 0);
1463 if ((DECL_P (o1) && c_omp_is_loop_iterator (o1, d) >= 0)
1464 || (DECL_P (o2) && c_omp_is_loop_iterator (o2, d) >= 0))
1466 a2 = TREE_OPERAND (t, 1);
1467 if (TREE_CODE (t) == MINUS_EXPR)
1468 neg_a2 = true;
1469 t = a1;
1470 break;
1473 if (TREE_CODE (a2) == MULT_EXPR)
1475 a1 = TREE_OPERAND (t, 0);
1476 if (TREE_CODE (t) == MINUS_EXPR)
1477 neg_a1 = true;
1478 t = a2;
1479 a2 = a1;
1480 break;
1482 if (TREE_CODE (a1) == MULT_EXPR)
1484 a2 = TREE_OPERAND (t, 1);
1485 if (TREE_CODE (t) == MINUS_EXPR)
1486 neg_a2 = true;
1487 t = a1;
1488 break;
1490 a2 = integer_zero_node;
1491 break;
1492 case POINTER_PLUS_EXPR:
1493 a1 = TREE_OPERAND (t, 0);
1494 a2 = TREE_OPERAND (t, 1);
1495 while (CONVERT_EXPR_P (a1))
1496 a1 = TREE_OPERAND (a1, 0);
1497 if (DECL_P (a1) && c_omp_is_loop_iterator (a1, d) >= 0)
1499 a2 = TREE_OPERAND (t, 1);
1500 t = a1;
1501 break;
1503 break;
1504 default:
1505 break;
1508 a1 = integer_one_node;
1509 if (TREE_CODE (t) == MULT_EXPR)
1511 tree o1 = TREE_OPERAND (t, 0);
1512 tree o2 = TREE_OPERAND (t, 1);
1513 while (CONVERT_EXPR_P (o1))
1514 o1 = TREE_OPERAND (o1, 0);
1515 while (CONVERT_EXPR_P (o2))
1516 o2 = TREE_OPERAND (o2, 0);
1517 if (DECL_P (o1) && c_omp_is_loop_iterator (o1, d) >= 0)
1519 a1 = TREE_OPERAND (t, 1);
1520 t = o1;
1522 else if (DECL_P (o2) && c_omp_is_loop_iterator (o2, d) >= 0)
1524 a1 = TREE_OPERAND (t, 0);
1525 t = o2;
1529 d->kind &= 3;
1530 tree ret = NULL_TREE;
1531 if (DECL_P (t) && c_omp_is_loop_iterator (t, d) >= 0)
1533 location_t loc = d->expr_loc;
1534 if (loc == UNKNOWN_LOCATION)
1535 loc = d->stmt_loc;
1536 if (!lang_hooks.types_compatible_p (TREE_TYPE (*tp), TREE_TYPE (t)))
1538 if (d->kind == 0)
1539 error_at (loc, "outer iteration variable %qD used in initializer"
1540 " expression has type other than %qT",
1541 t, TREE_TYPE (*tp));
1542 else
1543 error_at (loc, "outer iteration variable %qD used in condition"
1544 " expression has type other than %qT",
1545 t, TREE_TYPE (*tp));
1546 d->fail = true;
1548 else if (!INTEGRAL_TYPE_P (TREE_TYPE (a1)))
1550 error_at (loc, "outer iteration variable %qD multiplier expression"
1551 " %qE is not integral", t, a1);
1552 d->fail = true;
1554 else if (!INTEGRAL_TYPE_P (TREE_TYPE (a2)))
1556 error_at (loc, "outer iteration variable %qD addend expression"
1557 " %qE is not integral", t, a2);
1558 d->fail = true;
1560 else
1562 walk_tree_1 (&a1, c_omp_check_loop_iv_r, d, NULL, lh);
1563 walk_tree_1 (&a2, c_omp_check_loop_iv_r, d, NULL, lh);
1565 if (!d->fail)
1567 a1 = fold_convert (TREE_TYPE (*tp), a1);
1568 a2 = fold_convert (TREE_TYPE (*tp), a2);
1569 if (neg_a1)
1570 a1 = fold_build1 (NEGATE_EXPR, TREE_TYPE (a1), a1);
1571 if (neg_a2)
1572 a2 = fold_build1 (NEGATE_EXPR, TREE_TYPE (a2), a2);
1573 ret = t;
1574 *tp = make_tree_vec (3);
1575 TREE_VEC_ELT (*tp, 0) = t;
1576 TREE_VEC_ELT (*tp, 1) = a1;
1577 TREE_VEC_ELT (*tp, 2) = a2;
1580 else
1581 walk_tree_1 (&t, c_omp_check_loop_iv_r, d, NULL, lh);
1583 d->ppset = ppset;
1584 return ret;
1587 /* Diagnose invalid references to loop iterators in lb, b and incr
1588 expressions. */
bool
c_omp_check_loop_iv (tree stmt, tree declv, walk_tree_lh lh)
{
  hash_set<tree> pset;
  struct c_omp_check_loop_iv_data data;
  int i;

  data.declv = declv;
  data.fail = false;
  data.maybe_nonrect = false;
  data.stmt_loc = EXPR_LOCATION (stmt);
  data.lh = lh;
  data.ppset = &pset;
  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (stmt)); i++)
    {
      tree init = TREE_VEC_ELT (OMP_FOR_INIT (stmt), i);
      gcc_assert (TREE_CODE (init) == MODIFY_EXPR);
      tree decl = TREE_OPERAND (init, 0);
      tree cond = TREE_VEC_ELT (OMP_FOR_COND (stmt), i);
      gcc_assert (COMPARISON_CLASS_P (cond));
      gcc_assert (TREE_OPERAND (cond, 0) == decl);
      tree incr = TREE_VEC_ELT (OMP_FOR_INCR (stmt), i);
      data.expr_loc = EXPR_LOCATION (TREE_OPERAND (init, 1));
      tree vec_outer1 = NULL_TREE, vec_outer2 = NULL_TREE;
      int kind = 0;
      /* Diagnose reuse of an outer loop's iteration variable as the
	 iterator of an inner associated loop.  */
      if (i > 0
	  && (unsigned) c_omp_is_loop_iterator (decl, &data) < (unsigned) i)
	{
	  location_t loc = data.expr_loc;
	  if (loc == UNKNOWN_LOCATION)
	    loc = data.stmt_loc;
	  error_at (loc, "the same loop iteration variables %qD used in "
			 "multiple associated loops", decl);
	  data.fail = true;
	}
      /* Handle non-rectangular loop nests: for non-OpenACC inner loops
	 allow references to outer iterators (kind bit 2).  */
      if (TREE_CODE (stmt) != OACC_LOOP && i > 0)
	kind = 4;
      data.kind = kind;
      data.idx = i;
      /* Walk the initializer (kind bits 0-1 == 0).  */
      walk_tree_1 (&TREE_OPERAND (init, 1),
		   c_omp_check_loop_iv_r, &data, NULL, lh);
      if (data.maybe_nonrect)
	vec_outer1 = c_omp_check_nonrect_loop_iv (&TREE_OPERAND (init, 1),
						  &data, lh);
      /* Don't warn for C++ random access iterators here, the
	 expression then involves the subtraction and always refers
	 to the original value.  The C++ FE needs to warn on those
	 earlier.  */
      if (decl == TREE_VEC_ELT (declv, i)
	  || (TREE_CODE (TREE_VEC_ELT (declv, i)) == TREE_LIST
	      && decl == TREE_PURPOSE (TREE_VEC_ELT (declv, i))))
	{
	  /* Walk the condition's non-decl operand (kind bits 0-1 == 1).  */
	  data.expr_loc = EXPR_LOCATION (cond);
	  data.kind = kind | 1;
	  walk_tree_1 (&TREE_OPERAND (cond, 1),
		       c_omp_check_loop_iv_r, &data, NULL, lh);
	  if (data.maybe_nonrect)
	    vec_outer2 = c_omp_check_nonrect_loop_iv (&TREE_OPERAND (cond, 1),
						      &data, lh);
	}
      /* lb and b must refer to the same outer iteration variable.  */
      if (vec_outer1 && vec_outer2 && vec_outer1 != vec_outer2)
	{
	  location_t loc = data.expr_loc;
	  if (loc == UNKNOWN_LOCATION)
	    loc = data.stmt_loc;
	  error_at (loc, "two different outer iteration variables %qD and %qD"
			 " used in a single loop", vec_outer1, vec_outer2);
	  data.fail = true;
	}
      if (vec_outer1 || vec_outer2)
	OMP_FOR_NON_RECTANGULAR (stmt) = 1;
      /* Walk the increment (kind == 2); only the non-decl operand of
	 the MODIFY_EXPR's rhs needs checking.  */
      if (TREE_CODE (incr) == MODIFY_EXPR)
	{
	  gcc_assert (TREE_OPERAND (incr, 0) == decl);
	  incr = TREE_OPERAND (incr, 1);
	  data.kind = 2;
	  if (TREE_CODE (incr) == PLUS_EXPR
	      && TREE_OPERAND (incr, 1) == decl)
	    {
	      data.expr_loc = EXPR_LOCATION (TREE_OPERAND (incr, 0));
	      walk_tree_1 (&TREE_OPERAND (incr, 0),
			   c_omp_check_loop_iv_r, &data, NULL, lh);
	    }
	  else
	    {
	      data.expr_loc = EXPR_LOCATION (TREE_OPERAND (incr, 1));
	      walk_tree_1 (&TREE_OPERAND (incr, 1),
			   c_omp_check_loop_iv_r, &data, NULL, lh);
	    }
	}
    }
  return !data.fail;
}
1685 /* Similar, but allows to check the init or cond expressions individually. */
bool
c_omp_check_loop_iv_exprs (location_t stmt_loc, enum tree_code code,
			   tree declv, int i, tree decl, tree init, tree cond,
			   walk_tree_lh lh)
{
  hash_set<tree> pset;
  struct c_omp_check_loop_iv_data data;
  /* Allow non-rectangular outer-iterator references for non-OpenACC
     inner loops, as in c_omp_check_loop_iv.  */
  int kind = (code != OACC_LOOP && i > 0) ? 4 : 0;

  data.declv = declv;
  data.fail = false;
  data.maybe_nonrect = false;
  data.stmt_loc = stmt_loc;
  data.lh = lh;
  data.ppset = &pset;
  data.idx = i;
  /* Diagnose reuse of an outer loop's iterator for this loop.  */
  if (i > 0
      && (unsigned) c_omp_is_loop_iterator (decl, &data) < (unsigned) i)
    {
      error_at (stmt_loc, "the same loop iteration variables %qD used in "
			  "multiple associated loops", decl);
      data.fail = true;
    }
  if (init)
    {
      data.expr_loc = EXPR_LOCATION (init);
      data.kind = kind;
      walk_tree_1 (&init,
		   c_omp_check_loop_iv_r, &data, NULL, lh);
    }
  if (cond)
    {
      gcc_assert (COMPARISON_CLASS_P (cond));
      /* NOTE(review): this reads the location from INIT, not COND, even
	 inside the condition branch — looks intentional upstream but
	 worth confirming; INIT may also be NULL_TREE here.  */
      data.expr_loc = EXPR_LOCATION (init);
      data.kind = kind | 1;
      /* Walk whichever operand of the comparison is not the decl.  */
      if (TREE_OPERAND (cond, 0) == decl)
	walk_tree_1 (&TREE_OPERAND (cond, 1),
		     c_omp_check_loop_iv_r, &data, NULL, lh);
      else
	walk_tree_1 (&TREE_OPERAND (cond, 0),
		     c_omp_check_loop_iv_r, &data, NULL, lh);
    }
  return !data.fail;
}
1733 /* Helper function for c_omp_check_loop_binding_exprs: look for a binding
1734 of DECL in BODY. Only traverse things that might be containers for
1735 intervening code in an OMP loop. Returns the BIND_EXPR or DECL_EXPR
1736 if found, otherwise null. */
1738 static tree
1739 find_binding_in_body (tree decl, tree body)
1741 if (!body)
1742 return NULL_TREE;
1744 switch (TREE_CODE (body))
1746 case BIND_EXPR:
1747 for (tree b = BIND_EXPR_VARS (body); b; b = DECL_CHAIN (b))
1748 if (b == decl)
1749 return body;
1750 return find_binding_in_body (decl, BIND_EXPR_BODY (body));
1752 case DECL_EXPR:
1753 if (DECL_EXPR_DECL (body) == decl)
1754 return body;
1755 return NULL_TREE;
1757 case STATEMENT_LIST:
1758 for (tree_stmt_iterator si = tsi_start (body); !tsi_end_p (si);
1759 tsi_next (&si))
1761 tree b = find_binding_in_body (decl, tsi_stmt (si));
1762 if (b)
1763 return b;
1765 return NULL_TREE;
1767 case OMP_STRUCTURED_BLOCK:
1768 return find_binding_in_body (decl, OMP_BODY (body));
1770 default:
1771 return NULL_TREE;
1775 /* Traversal function for check_loop_binding_expr, to diagnose
1776 errors when a binding made in intervening code is referenced outside
1777 of the loop. Returns non-null if such a reference is found. DATA points
1778 to the tree containing the loop body. */
1780 static tree
1781 check_loop_binding_expr_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
1782 void *data)
1784 tree body = *(tree *)data;
1786 if (DECL_P (*tp) && find_binding_in_body (*tp, body))
1787 return *tp;
1788 return NULL_TREE;
/* Helper macro used below: prefer LOC1 when it is a known location,
   otherwise fall back to LOC2.  */

#define LOCATION_OR(loc1, loc2) \
  ((loc1) != UNKNOWN_LOCATION ? (loc1) : (loc2))
1796 /* Check a single expression EXPR for references to variables bound in
1797 intervening code in BODY. Return true if ok, otherwise give an error
1798 referencing CONTEXT and return false. Use LOC for the error message
1799 if EXPR doesn't have one. */
1800 static bool
1801 check_loop_binding_expr (tree expr, tree body, const char *context,
1802 location_t loc)
1804 tree bad = walk_tree (&expr, check_loop_binding_expr_r, (void *)&body, NULL);
1806 if (bad)
1808 location_t eloc = EXPR_LOCATION (expr);
1809 error_at (LOCATION_OR (eloc, loc),
1810 "variable %qD used %s is bound "
1811 "in intervening code", bad, context);
1812 return false;
1814 return true;
1817 /* STMT is an OMP_FOR construct. Check all of the iteration variable,
1818 initializer, end condition, and increment for bindings inside the
1819 loop body. If ORIG_INITS is provided, check those elements too.
1820 Return true if OK, false otherwise. */
bool
c_omp_check_loop_binding_exprs (tree stmt, vec<tree> *orig_inits)
{
  bool ok = true;
  location_t loc = EXPR_LOCATION (stmt);
  tree body = OMP_FOR_BODY (stmt);
  int orig_init_length = orig_inits ? orig_inits->length () : 0;

  /* Start at 1: only inner loops can follow intervening code, so the
     outermost loop's expressions need no check.  */
  for (int i = 1; i < TREE_VEC_LENGTH (OMP_FOR_INIT (stmt)); i++)
    {
      tree init = TREE_VEC_ELT (OMP_FOR_INIT (stmt), i);
      tree cond = TREE_VEC_ELT (OMP_FOR_COND (stmt), i);
      tree incr = TREE_VEC_ELT (OMP_FOR_INCR (stmt), i);
      gcc_assert (TREE_CODE (init) == MODIFY_EXPR);
      tree decl = TREE_OPERAND (init, 0);
      tree orig_init = i < orig_init_length ? (*orig_inits)[i] : NULL_TREE;
      tree e;
      location_t eloc;

      /* Check the iteration variable and its initializer(s).  */
      e = TREE_OPERAND (init, 1);
      eloc = LOCATION_OR (EXPR_LOCATION (init), loc);
      if (!check_loop_binding_expr (decl, body, "as loop variable", eloc))
	ok = false;
      if (!check_loop_binding_expr (e, body, "in initializer", eloc))
	ok = false;
      if (orig_init
	  && !check_loop_binding_expr (orig_init, body,
				       "in initializer", eloc))
	ok = false;

      /* INCR and/or COND may be null if this is a template with a
	 class iterator.  */
      if (cond)
	{
	  eloc = LOCATION_OR (EXPR_LOCATION (cond), loc);
	  /* Only check the non-decl side of the comparison; fall back
	     to the whole expression if it is not a comparison.  */
	  if (COMPARISON_CLASS_P (cond) && TREE_OPERAND (cond, 0) == decl)
	    e = TREE_OPERAND (cond, 1);
	  else if (COMPARISON_CLASS_P (cond) && TREE_OPERAND (cond, 1) == decl)
	    e = TREE_OPERAND (cond, 0);
	  else
	    e = cond;
	  if (!check_loop_binding_expr (e, body, "in end test", eloc))
	    ok = false;
	}

      if (incr)
	{
	  eloc = LOCATION_OR (EXPR_LOCATION (incr), loc);
	  /* INCR should be either a MODIFY_EXPR or pre/post
	     increment/decrement.  We don't have to check the latter
	     since there are no operands besides the iteration variable.  */
	  if (TREE_CODE (incr) == MODIFY_EXPR
	      && !check_loop_binding_expr (TREE_OPERAND (incr, 1), body,
					   "in increment expression", eloc))
	    ok = false;
	}
    }

  return ok;
}
1882 /* This function splits clauses for OpenACC combined loop
1883 constructs. OpenACC combined loop constructs are:
1884 #pragma acc kernels loop
1885 #pragma acc parallel loop */
tree
c_oacc_split_loop_clauses (tree clauses, tree *not_loop_clauses,
			   bool is_parallel)
{
  tree next, loop_clauses, nc;

  loop_clauses = *not_loop_clauses = NULL_TREE;
  for (; clauses ; clauses = next)
    {
      /* Save the chain link before CLAUSES is respliced below.  */
      next = OMP_CLAUSE_CHAIN (clauses);

      switch (OMP_CLAUSE_CODE (clauses))
	{
	  /* Loop clauses: pushed onto the head of the loop list (so the
	     returned list is in reverse of the input order).  */
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_PRIVATE:
	  OMP_CLAUSE_CHAIN (clauses) = loop_clauses;
	  loop_clauses = clauses;
	  break;

	  /* Reductions must be duplicated on both constructs.  */
	case OMP_CLAUSE_REDUCTION:
	  if (is_parallel)
	    {
	      /* Build a copy for the parallel construct; the original
		 clause goes on the loop below.  */
	      nc = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
				     OMP_CLAUSE_REDUCTION);
	      OMP_CLAUSE_DECL (nc) = OMP_CLAUSE_DECL (clauses);
	      OMP_CLAUSE_REDUCTION_CODE (nc)
		= OMP_CLAUSE_REDUCTION_CODE (clauses);
	      OMP_CLAUSE_CHAIN (nc) = *not_loop_clauses;
	      *not_loop_clauses = nc;
	    }

	  OMP_CLAUSE_CHAIN (clauses) = loop_clauses;
	  loop_clauses = clauses;
	  break;

	  /* Parallel/kernels clauses.  */
	default:
	  OMP_CLAUSE_CHAIN (clauses) = *not_loop_clauses;
	  *not_loop_clauses = clauses;
	  break;
	}
    }

  return loop_clauses;
}
1942 /* This function attempts to split or duplicate clauses for OpenMP
1943 combined/composite constructs. Right now there are 30 different
1944 constructs. CODE is the innermost construct in the combined construct,
1945 and MASK allows to determine which constructs are combined together,
1946 as every construct has at least one clause that no other construct
1947 has (except for OMP_SECTIONS, but that can be only combined with parallel,
1948 and OMP_MASTER, which doesn't have any clauses at all).
1949 OpenMP combined/composite constructs are:
1950 #pragma omp distribute parallel for
1951 #pragma omp distribute parallel for simd
1952 #pragma omp distribute simd
1953 #pragma omp for simd
1954 #pragma omp masked taskloop
1955 #pragma omp masked taskloop simd
1956 #pragma omp master taskloop
1957 #pragma omp master taskloop simd
1958 #pragma omp parallel for
1959 #pragma omp parallel for simd
1960 #pragma omp parallel loop
1961 #pragma omp parallel masked
1962 #pragma omp parallel masked taskloop
1963 #pragma omp parallel masked taskloop simd
1964 #pragma omp parallel master
1965 #pragma omp parallel master taskloop
1966 #pragma omp parallel master taskloop simd
1967 #pragma omp parallel sections
1968 #pragma omp target parallel
1969 #pragma omp target parallel for
1970 #pragma omp target parallel for simd
1971 #pragma omp target parallel loop
1972 #pragma omp target teams
1973 #pragma omp target teams distribute
1974 #pragma omp target teams distribute parallel for
1975 #pragma omp target teams distribute parallel for simd
1976 #pragma omp target teams distribute simd
1977 #pragma omp target teams loop
1978 #pragma omp target simd
1979 #pragma omp taskloop simd
1980 #pragma omp teams distribute
1981 #pragma omp teams distribute parallel for
1982 #pragma omp teams distribute parallel for simd
1983 #pragma omp teams distribute simd
1984 #pragma omp teams loop */
1986 void
1987 c_omp_split_clauses (location_t loc, enum tree_code code,
1988 omp_clause_mask mask, tree clauses, tree *cclauses)
1990 tree next, c;
1991 enum c_omp_clause_split s;
1992 int i;
1993 bool has_dup_allocate = false;
1995 for (i = 0; i < C_OMP_CLAUSE_SPLIT_COUNT; i++)
1996 cclauses[i] = NULL;
1997 /* Add implicit nowait clause on
1998 #pragma omp parallel {for,for simd,sections}. */
1999 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_THREADS)) != 0)
2000 switch (code)
2002 case OMP_FOR:
2003 case OMP_SIMD:
2004 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_SCHEDULE)) != 0)
2005 cclauses[C_OMP_CLAUSE_SPLIT_FOR]
2006 = build_omp_clause (loc, OMP_CLAUSE_NOWAIT);
2007 break;
2008 case OMP_SECTIONS:
2009 cclauses[C_OMP_CLAUSE_SPLIT_SECTIONS]
2010 = build_omp_clause (loc, OMP_CLAUSE_NOWAIT);
2011 break;
2012 default:
2013 break;
2016 for (; clauses ; clauses = next)
2018 next = OMP_CLAUSE_CHAIN (clauses);
2020 switch (OMP_CLAUSE_CODE (clauses))
2022 /* First the clauses that are unique to some constructs. */
2023 case OMP_CLAUSE_DEVICE:
2024 case OMP_CLAUSE_MAP:
2025 case OMP_CLAUSE_IS_DEVICE_PTR:
2026 case OMP_CLAUSE_HAS_DEVICE_ADDR:
2027 case OMP_CLAUSE_DEFAULTMAP:
2028 case OMP_CLAUSE_DEPEND:
2029 s = C_OMP_CLAUSE_SPLIT_TARGET;
2030 break;
2031 case OMP_CLAUSE_DOACROSS:
2032 /* This can happen with invalid depend(source) or
2033 depend(sink:vec) on target combined with other constructs. */
2034 gcc_assert (OMP_CLAUSE_DOACROSS_DEPEND (clauses));
2035 s = C_OMP_CLAUSE_SPLIT_TARGET;
2036 break;
2037 case OMP_CLAUSE_NUM_TEAMS:
2038 s = C_OMP_CLAUSE_SPLIT_TEAMS;
2039 break;
2040 case OMP_CLAUSE_DIST_SCHEDULE:
2041 s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE;
2042 break;
2043 case OMP_CLAUSE_COPYIN:
2044 case OMP_CLAUSE_NUM_THREADS:
2045 case OMP_CLAUSE_PROC_BIND:
2046 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2047 break;
2048 case OMP_CLAUSE_ORDERED:
2049 s = C_OMP_CLAUSE_SPLIT_FOR;
2050 break;
2051 case OMP_CLAUSE_SCHEDULE:
2052 s = C_OMP_CLAUSE_SPLIT_FOR;
2053 if (code != OMP_SIMD)
2054 OMP_CLAUSE_SCHEDULE_SIMD (clauses) = 0;
2055 break;
2056 case OMP_CLAUSE_SAFELEN:
2057 case OMP_CLAUSE_SIMDLEN:
2058 case OMP_CLAUSE_ALIGNED:
2059 case OMP_CLAUSE_NONTEMPORAL:
2060 s = C_OMP_CLAUSE_SPLIT_SIMD;
2061 break;
2062 case OMP_CLAUSE_GRAINSIZE:
2063 case OMP_CLAUSE_NUM_TASKS:
2064 case OMP_CLAUSE_FINAL:
2065 case OMP_CLAUSE_UNTIED:
2066 case OMP_CLAUSE_MERGEABLE:
2067 case OMP_CLAUSE_NOGROUP:
2068 case OMP_CLAUSE_PRIORITY:
2069 s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2070 break;
2071 case OMP_CLAUSE_BIND:
2072 s = C_OMP_CLAUSE_SPLIT_LOOP;
2073 break;
2074 case OMP_CLAUSE_FILTER:
2075 s = C_OMP_CLAUSE_SPLIT_MASKED;
2076 break;
2077 /* Duplicate this to all of taskloop, distribute, for, simd and
2078 loop. */
2079 case OMP_CLAUSE_COLLAPSE:
2080 if (code == OMP_SIMD)
2082 if ((mask & ((OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_SCHEDULE)
2083 | (OMP_CLAUSE_MASK_1
2084 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)
2085 | (OMP_CLAUSE_MASK_1
2086 << PRAGMA_OMP_CLAUSE_NOGROUP))) != 0)
2088 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2089 OMP_CLAUSE_COLLAPSE);
2090 OMP_CLAUSE_COLLAPSE_EXPR (c)
2091 = OMP_CLAUSE_COLLAPSE_EXPR (clauses);
2092 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_SIMD];
2093 cclauses[C_OMP_CLAUSE_SPLIT_SIMD] = c;
2095 else
2097 /* This must be #pragma omp target simd */
2098 s = C_OMP_CLAUSE_SPLIT_SIMD;
2099 break;
2102 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_SCHEDULE)) != 0)
2104 if ((mask & (OMP_CLAUSE_MASK_1
2105 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)) != 0)
2107 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2108 OMP_CLAUSE_COLLAPSE);
2109 OMP_CLAUSE_COLLAPSE_EXPR (c)
2110 = OMP_CLAUSE_COLLAPSE_EXPR (clauses);
2111 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_FOR];
2112 cclauses[C_OMP_CLAUSE_SPLIT_FOR] = c;
2113 s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE;
2115 else
2116 s = C_OMP_CLAUSE_SPLIT_FOR;
2118 else if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP))
2119 != 0)
2120 s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2121 else if (code == OMP_LOOP)
2122 s = C_OMP_CLAUSE_SPLIT_LOOP;
2123 else
2124 s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE;
2125 break;
2126 /* Private clause is supported on all constructs but master/masked,
2127 it is enough to put it on the innermost one other than
2128 master/masked. For #pragma omp {for,sections} put it on parallel
2129 though, as that's what we did for OpenMP 3.1. */
2130 case OMP_CLAUSE_PRIVATE:
2131 switch (code)
2133 case OMP_SIMD: s = C_OMP_CLAUSE_SPLIT_SIMD; break;
2134 case OMP_FOR: case OMP_SECTIONS:
2135 case OMP_PARALLEL: s = C_OMP_CLAUSE_SPLIT_PARALLEL; break;
2136 case OMP_DISTRIBUTE: s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE; break;
2137 case OMP_TEAMS: s = C_OMP_CLAUSE_SPLIT_TEAMS; break;
2138 case OMP_MASTER: s = C_OMP_CLAUSE_SPLIT_PARALLEL; break;
2139 case OMP_MASKED: s = C_OMP_CLAUSE_SPLIT_PARALLEL; break;
2140 case OMP_TASKLOOP: s = C_OMP_CLAUSE_SPLIT_TASKLOOP; break;
2141 case OMP_LOOP: s = C_OMP_CLAUSE_SPLIT_LOOP; break;
2142 default: gcc_unreachable ();
2144 break;
2145 /* Firstprivate clause is supported on all constructs but
2146 simd, master, masked and loop. Put it on the outermost of those
2147 and duplicate on teams and parallel. */
2148 case OMP_CLAUSE_FIRSTPRIVATE:
2149 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP))
2150 != 0)
2152 if (code == OMP_SIMD
2153 && (mask & ((OMP_CLAUSE_MASK_1
2154 << PRAGMA_OMP_CLAUSE_NUM_THREADS)
2155 | (OMP_CLAUSE_MASK_1
2156 << PRAGMA_OMP_CLAUSE_NUM_TEAMS))) == 0)
2158 /* This must be #pragma omp target simd. */
2159 s = C_OMP_CLAUSE_SPLIT_TARGET;
2160 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clauses) = 1;
2161 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (clauses) = 1;
2162 break;
2164 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2165 OMP_CLAUSE_FIRSTPRIVATE);
2166 /* firstprivate should not be applied to target if it is
2167 also lastprivate or on the combined/composite construct,
2168 or if it is mentioned in map clause. OMP_CLAUSE_DECLs
2169 may need to go through FE handling though (instantiation,
2170 C++ non-static data members, array section lowering), so
2171 add the clause with OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT and
2172 let *finish_omp_clauses and the gimplifier handle it
2173 right. */
2174 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 1;
2175 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2176 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TARGET];
2177 cclauses[C_OMP_CLAUSE_SPLIT_TARGET] = c;
2179 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_THREADS))
2180 != 0)
2182 if ((mask & ((OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_TEAMS)
2183 | (OMP_CLAUSE_MASK_1
2184 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE))) != 0)
2186 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2187 OMP_CLAUSE_FIRSTPRIVATE);
2188 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2189 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_PARALLEL];
2190 cclauses[C_OMP_CLAUSE_SPLIT_PARALLEL] = c;
2191 if ((mask & (OMP_CLAUSE_MASK_1
2192 << PRAGMA_OMP_CLAUSE_NUM_TEAMS)) != 0)
2193 s = C_OMP_CLAUSE_SPLIT_TEAMS;
2194 else
2195 s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE;
2197 else if ((mask & (OMP_CLAUSE_MASK_1
2198 << PRAGMA_OMP_CLAUSE_NOGROUP)) != 0)
2199 /* This must be
2200 #pragma omp parallel mas{ked,ter} taskloop{, simd}. */
2201 s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2202 else
2203 /* This must be
2204 #pragma omp parallel{, for{, simd}, sections,loop}
2206 #pragma omp target parallel. */
2207 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2209 else if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_TEAMS))
2210 != 0)
2212 /* This must be one of
2213 #pragma omp {,target }teams {distribute,loop}
2214 #pragma omp target teams
2215 #pragma omp {,target }teams distribute simd. */
2216 gcc_assert (code == OMP_DISTRIBUTE
2217 || code == OMP_LOOP
2218 || code == OMP_TEAMS
2219 || code == OMP_SIMD);
2220 s = C_OMP_CLAUSE_SPLIT_TEAMS;
2222 else if ((mask & (OMP_CLAUSE_MASK_1
2223 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)) != 0)
2225 /* This must be #pragma omp distribute simd. */
2226 gcc_assert (code == OMP_SIMD);
2227 s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE;
2229 else if ((mask & (OMP_CLAUSE_MASK_1
2230 << PRAGMA_OMP_CLAUSE_NOGROUP)) != 0)
2232 /* This must be
2233 #pragma omp {,{,parallel }mas{ked,ter} }taskloop simd
2235 #pragma omp {,parallel }mas{ked,ter} taskloop. */
2236 gcc_assert (code == OMP_SIMD || code == OMP_TASKLOOP);
2237 s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2239 else
2241 /* This must be #pragma omp for simd. */
2242 gcc_assert (code == OMP_SIMD);
2243 s = C_OMP_CLAUSE_SPLIT_FOR;
2245 break;
2246 /* Lastprivate is allowed on distribute, for, sections, taskloop, loop
2247 and simd. In parallel {for{, simd},sections} we actually want to
2248 put it on parallel rather than for or sections. */
2249 case OMP_CLAUSE_LASTPRIVATE:
2250 if (code == OMP_DISTRIBUTE)
2252 s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE;
2253 break;
2255 if ((mask & (OMP_CLAUSE_MASK_1
2256 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)) != 0)
2258 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2259 OMP_CLAUSE_LASTPRIVATE);
2260 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2261 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_DISTRIBUTE];
2262 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
2263 = OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (clauses);
2264 cclauses[C_OMP_CLAUSE_SPLIT_DISTRIBUTE] = c;
2266 if (code == OMP_FOR || code == OMP_SECTIONS)
2268 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_THREADS))
2269 != 0)
2270 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2271 else
2272 s = C_OMP_CLAUSE_SPLIT_FOR;
2273 break;
2275 if (code == OMP_TASKLOOP)
2277 s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2278 break;
2280 if (code == OMP_LOOP)
2282 s = C_OMP_CLAUSE_SPLIT_LOOP;
2283 break;
2285 gcc_assert (code == OMP_SIMD);
2286 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_SCHEDULE)) != 0)
2288 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2289 OMP_CLAUSE_LASTPRIVATE);
2290 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2291 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
2292 = OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (clauses);
2293 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_THREADS))
2294 != 0)
2295 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2296 else
2297 s = C_OMP_CLAUSE_SPLIT_FOR;
2298 OMP_CLAUSE_CHAIN (c) = cclauses[s];
2299 cclauses[s] = c;
2301 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP)) != 0)
2303 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2304 OMP_CLAUSE_LASTPRIVATE);
2305 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2306 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
2307 = OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (clauses);
2308 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TASKLOOP];
2309 cclauses[C_OMP_CLAUSE_SPLIT_TASKLOOP] = c;
2311 s = C_OMP_CLAUSE_SPLIT_SIMD;
2312 break;
2313 /* Shared and default clauses are allowed on parallel, teams and
2314 taskloop. */
2315 case OMP_CLAUSE_SHARED:
2316 case OMP_CLAUSE_DEFAULT:
2317 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP))
2318 != 0)
2320 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_THREADS))
2321 != 0)
2323 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2324 OMP_CLAUSE_CODE (clauses));
2325 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_SHARED)
2326 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2327 else
2328 OMP_CLAUSE_DEFAULT_KIND (c)
2329 = OMP_CLAUSE_DEFAULT_KIND (clauses);
2330 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_PARALLEL];
2331 cclauses[C_OMP_CLAUSE_SPLIT_PARALLEL] = c;
2333 s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2334 break;
2336 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_TEAMS))
2337 != 0)
2339 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_THREADS))
2340 == 0)
2342 s = C_OMP_CLAUSE_SPLIT_TEAMS;
2343 break;
2345 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2346 OMP_CLAUSE_CODE (clauses));
2347 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_SHARED)
2348 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2349 else
2350 OMP_CLAUSE_DEFAULT_KIND (c)
2351 = OMP_CLAUSE_DEFAULT_KIND (clauses);
2352 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TEAMS];
2353 cclauses[C_OMP_CLAUSE_SPLIT_TEAMS] = c;
2355 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2356 break;
2357 /* order clauses are allowed on distribute, for, simd and loop. */
2358 case OMP_CLAUSE_ORDER:
2359 if ((mask & (OMP_CLAUSE_MASK_1
2360 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)) != 0)
2362 if (code == OMP_DISTRIBUTE)
2364 s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE;
2365 break;
2367 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2368 OMP_CLAUSE_ORDER);
2369 OMP_CLAUSE_ORDER_UNCONSTRAINED (c)
2370 = OMP_CLAUSE_ORDER_UNCONSTRAINED (clauses);
2371 OMP_CLAUSE_ORDER_REPRODUCIBLE (c)
2372 = OMP_CLAUSE_ORDER_REPRODUCIBLE (clauses);
2373 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_DISTRIBUTE];
2374 cclauses[C_OMP_CLAUSE_SPLIT_DISTRIBUTE] = c;
2376 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_SCHEDULE)) != 0)
2378 if (code == OMP_SIMD)
2380 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2381 OMP_CLAUSE_ORDER);
2382 OMP_CLAUSE_ORDER_UNCONSTRAINED (c)
2383 = OMP_CLAUSE_ORDER_UNCONSTRAINED (clauses);
2384 OMP_CLAUSE_ORDER_REPRODUCIBLE (c)
2385 = OMP_CLAUSE_ORDER_REPRODUCIBLE (clauses);
2386 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_FOR];
2387 cclauses[C_OMP_CLAUSE_SPLIT_FOR] = c;
2388 s = C_OMP_CLAUSE_SPLIT_SIMD;
2390 else
2391 s = C_OMP_CLAUSE_SPLIT_FOR;
2393 else if (code == OMP_LOOP)
2394 s = C_OMP_CLAUSE_SPLIT_LOOP;
2395 else
2396 s = C_OMP_CLAUSE_SPLIT_SIMD;
2397 break;
2398 /* Reduction is allowed on simd, for, parallel, sections, taskloop,
2399 teams and loop. Duplicate it on all of them, but omit on for or
2400 sections if parallel is present (unless inscan, in that case
2401 omit on parallel). If taskloop or loop is combined with
2402 parallel, omit it on parallel. */
2403 case OMP_CLAUSE_REDUCTION:
2404 if (OMP_CLAUSE_REDUCTION_TASK (clauses))
2406 if (code == OMP_SIMD || code == OMP_LOOP)
2408 error_at (OMP_CLAUSE_LOCATION (clauses),
2409 "invalid %<task%> reduction modifier on construct "
2410 "combined with %<simd%> or %<loop%>");
2411 OMP_CLAUSE_REDUCTION_TASK (clauses) = 0;
2413 else if (code != OMP_SECTIONS
2414 && (mask & (OMP_CLAUSE_MASK_1
2415 << PRAGMA_OMP_CLAUSE_NUM_THREADS)) == 0
2416 && (mask & (OMP_CLAUSE_MASK_1
2417 << PRAGMA_OMP_CLAUSE_SCHEDULE)) == 0)
2419 error_at (OMP_CLAUSE_LOCATION (clauses),
2420 "invalid %<task%> reduction modifier on construct "
2421 "not combined with %<parallel%>, %<for%> or "
2422 "%<sections%>");
2423 OMP_CLAUSE_REDUCTION_TASK (clauses) = 0;
2426 if (OMP_CLAUSE_REDUCTION_INSCAN (clauses)
2427 && ((mask & ((OMP_CLAUSE_MASK_1
2428 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)
2429 | (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP)))
2430 != 0))
2432 error_at (OMP_CLAUSE_LOCATION (clauses),
2433 "%<inscan%> %<reduction%> clause on construct other "
2434 "than %<for%>, %<simd%>, %<for simd%>, "
2435 "%<parallel for%>, %<parallel for simd%>");
2436 OMP_CLAUSE_REDUCTION_INSCAN (clauses) = 0;
2438 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP)) != 0)
2440 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2441 OMP_CLAUSE_MAP);
2442 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2443 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2444 OMP_CLAUSE_MAP_IMPLICIT (c) = 1;
2445 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TARGET];
2446 cclauses[C_OMP_CLAUSE_SPLIT_TARGET] = c;
2448 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_SCHEDULE)) != 0)
2450 if (code == OMP_SIMD)
2452 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2453 OMP_CLAUSE_REDUCTION);
2454 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2455 OMP_CLAUSE_REDUCTION_CODE (c)
2456 = OMP_CLAUSE_REDUCTION_CODE (clauses);
2457 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
2458 = OMP_CLAUSE_REDUCTION_PLACEHOLDER (clauses);
2459 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
2460 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clauses);
2461 OMP_CLAUSE_REDUCTION_INSCAN (c)
2462 = OMP_CLAUSE_REDUCTION_INSCAN (clauses);
2463 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_SIMD];
2464 cclauses[C_OMP_CLAUSE_SPLIT_SIMD] = c;
2466 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_TEAMS))
2467 != 0)
2469 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2470 OMP_CLAUSE_REDUCTION);
2471 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2472 OMP_CLAUSE_REDUCTION_CODE (c)
2473 = OMP_CLAUSE_REDUCTION_CODE (clauses);
2474 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
2475 = OMP_CLAUSE_REDUCTION_PLACEHOLDER (clauses);
2476 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
2477 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clauses);
2478 OMP_CLAUSE_REDUCTION_INSCAN (c)
2479 = OMP_CLAUSE_REDUCTION_INSCAN (clauses);
2480 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TEAMS];
2481 cclauses[C_OMP_CLAUSE_SPLIT_TEAMS] = c;
2482 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2484 else if ((mask & (OMP_CLAUSE_MASK_1
2485 << PRAGMA_OMP_CLAUSE_NUM_THREADS)) != 0
2486 && !OMP_CLAUSE_REDUCTION_INSCAN (clauses))
2487 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2488 else
2489 s = C_OMP_CLAUSE_SPLIT_FOR;
2491 else if (code == OMP_SECTIONS
2492 || code == OMP_PARALLEL
2493 || code == OMP_MASTER
2494 || code == OMP_MASKED)
2495 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2496 else if (code == OMP_TASKLOOP)
2497 s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2498 else if (code == OMP_LOOP)
2499 s = C_OMP_CLAUSE_SPLIT_LOOP;
2500 else if (code == OMP_SIMD)
2502 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP))
2503 != 0)
2505 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2506 OMP_CLAUSE_REDUCTION);
2507 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2508 OMP_CLAUSE_REDUCTION_CODE (c)
2509 = OMP_CLAUSE_REDUCTION_CODE (clauses);
2510 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
2511 = OMP_CLAUSE_REDUCTION_PLACEHOLDER (clauses);
2512 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
2513 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clauses);
2514 OMP_CLAUSE_REDUCTION_INSCAN (c)
2515 = OMP_CLAUSE_REDUCTION_INSCAN (clauses);
2516 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TASKLOOP];
2517 cclauses[C_OMP_CLAUSE_SPLIT_TASKLOOP] = c;
2519 else if ((mask & (OMP_CLAUSE_MASK_1
2520 << PRAGMA_OMP_CLAUSE_NUM_TEAMS)) != 0)
2522 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2523 OMP_CLAUSE_REDUCTION);
2524 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2525 OMP_CLAUSE_REDUCTION_CODE (c)
2526 = OMP_CLAUSE_REDUCTION_CODE (clauses);
2527 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
2528 = OMP_CLAUSE_REDUCTION_PLACEHOLDER (clauses);
2529 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
2530 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clauses);
2531 OMP_CLAUSE_REDUCTION_INSCAN (c)
2532 = OMP_CLAUSE_REDUCTION_INSCAN (clauses);
2533 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TEAMS];
2534 cclauses[C_OMP_CLAUSE_SPLIT_TEAMS] = c;
2536 s = C_OMP_CLAUSE_SPLIT_SIMD;
2538 else
2539 s = C_OMP_CLAUSE_SPLIT_TEAMS;
2540 break;
2541 case OMP_CLAUSE_IN_REDUCTION:
2542 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP)) != 0)
2544 /* When on target, map(always, tofrom: item) is added as
2545 well. For non-combined target it is added in the FEs. */
2546 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2547 OMP_CLAUSE_MAP);
2548 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2549 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_TOFROM);
2550 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TARGET];
2551 cclauses[C_OMP_CLAUSE_SPLIT_TARGET] = c;
2552 s = C_OMP_CLAUSE_SPLIT_TARGET;
2553 break;
2555 /* in_reduction on taskloop simd becomes reduction on the simd
2556 and keeps being in_reduction on taskloop. */
2557 if (code == OMP_SIMD)
2559 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2560 OMP_CLAUSE_REDUCTION);
2561 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2562 OMP_CLAUSE_REDUCTION_CODE (c)
2563 = OMP_CLAUSE_REDUCTION_CODE (clauses);
2564 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
2565 = OMP_CLAUSE_REDUCTION_PLACEHOLDER (clauses);
2566 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
2567 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clauses);
2568 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_SIMD];
2569 cclauses[C_OMP_CLAUSE_SPLIT_SIMD] = c;
2571 s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2572 break;
2573 case OMP_CLAUSE_IF:
2574 if (OMP_CLAUSE_IF_MODIFIER (clauses) != ERROR_MARK)
2576 s = C_OMP_CLAUSE_SPLIT_COUNT;
2577 switch (OMP_CLAUSE_IF_MODIFIER (clauses))
2579 case OMP_PARALLEL:
2580 if ((mask & (OMP_CLAUSE_MASK_1
2581 << PRAGMA_OMP_CLAUSE_NUM_THREADS)) != 0)
2582 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2583 break;
2584 case OMP_SIMD:
2585 if (code == OMP_SIMD)
2586 s = C_OMP_CLAUSE_SPLIT_SIMD;
2587 break;
2588 case OMP_TASKLOOP:
2589 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP))
2590 != 0)
2591 s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2592 break;
2593 case OMP_TARGET:
2594 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP))
2595 != 0)
2596 s = C_OMP_CLAUSE_SPLIT_TARGET;
2597 break;
2598 default:
2599 break;
2601 if (s != C_OMP_CLAUSE_SPLIT_COUNT)
2602 break;
2603 /* Error-recovery here, invalid if-modifier specified, add the
2604 clause to just one construct. */
2605 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP)) != 0)
2606 s = C_OMP_CLAUSE_SPLIT_TARGET;
2607 else if ((mask & (OMP_CLAUSE_MASK_1
2608 << PRAGMA_OMP_CLAUSE_NUM_THREADS)) != 0)
2609 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2610 else if ((mask & (OMP_CLAUSE_MASK_1
2611 << PRAGMA_OMP_CLAUSE_NOGROUP)) != 0)
2612 s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2613 else if (code == OMP_SIMD)
2614 s = C_OMP_CLAUSE_SPLIT_SIMD;
2615 else
2616 gcc_unreachable ();
2617 break;
2619 /* Otherwise, duplicate if clause to all constructs. */
2620 if (code == OMP_SIMD)
2622 if ((mask & ((OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP)
2623 | (OMP_CLAUSE_MASK_1
2624 << PRAGMA_OMP_CLAUSE_NUM_THREADS)
2625 | (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP)))
2626 != 0)
2628 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2629 OMP_CLAUSE_IF);
2630 OMP_CLAUSE_IF_MODIFIER (c)
2631 = OMP_CLAUSE_IF_MODIFIER (clauses);
2632 OMP_CLAUSE_IF_EXPR (c) = OMP_CLAUSE_IF_EXPR (clauses);
2633 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_SIMD];
2634 cclauses[C_OMP_CLAUSE_SPLIT_SIMD] = c;
2636 else
2638 s = C_OMP_CLAUSE_SPLIT_SIMD;
2639 break;
2642 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP))
2643 != 0)
2645 if ((mask & (OMP_CLAUSE_MASK_1
2646 << PRAGMA_OMP_CLAUSE_NUM_THREADS)) != 0)
2648 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2649 OMP_CLAUSE_IF);
2650 OMP_CLAUSE_IF_MODIFIER (c)
2651 = OMP_CLAUSE_IF_MODIFIER (clauses);
2652 OMP_CLAUSE_IF_EXPR (c) = OMP_CLAUSE_IF_EXPR (clauses);
2653 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TASKLOOP];
2654 cclauses[C_OMP_CLAUSE_SPLIT_TASKLOOP] = c;
2655 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2657 else
2658 s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2660 else if ((mask & (OMP_CLAUSE_MASK_1
2661 << PRAGMA_OMP_CLAUSE_NUM_THREADS)) != 0)
2663 if ((mask & (OMP_CLAUSE_MASK_1
2664 << PRAGMA_OMP_CLAUSE_MAP)) != 0)
2666 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2667 OMP_CLAUSE_IF);
2668 OMP_CLAUSE_IF_MODIFIER (c)
2669 = OMP_CLAUSE_IF_MODIFIER (clauses);
2670 OMP_CLAUSE_IF_EXPR (c) = OMP_CLAUSE_IF_EXPR (clauses);
2671 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TARGET];
2672 cclauses[C_OMP_CLAUSE_SPLIT_TARGET] = c;
2673 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2675 else
2676 s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2678 else
2679 s = C_OMP_CLAUSE_SPLIT_TARGET;
2680 break;
2681 case OMP_CLAUSE_LINEAR:
2682 /* Linear clause is allowed on simd and for. Put it on the
2683 innermost construct. */
2684 if (code == OMP_SIMD)
2685 s = C_OMP_CLAUSE_SPLIT_SIMD;
2686 else
2687 s = C_OMP_CLAUSE_SPLIT_FOR;
2688 break;
2689 case OMP_CLAUSE_NOWAIT:
2690 /* Nowait clause is allowed on target, for and sections, but
2691 is not allowed on parallel for or parallel sections. Therefore,
2692 put it on target construct if present, because that can only
2693 be combined with parallel for{, simd} and not with for{, simd},
2694 otherwise to the worksharing construct. */
2695 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP))
2696 != 0)
2697 s = C_OMP_CLAUSE_SPLIT_TARGET;
2698 else
2699 s = C_OMP_CLAUSE_SPLIT_FOR;
2700 break;
2701 /* thread_limit is allowed on target and teams. Distribute it
2702 to all. */
2703 case OMP_CLAUSE_THREAD_LIMIT:
2704 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP))
2705 != 0)
2707 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_TEAMS))
2708 != 0)
2710 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2711 OMP_CLAUSE_THREAD_LIMIT);
2712 OMP_CLAUSE_THREAD_LIMIT_EXPR (c)
2713 = OMP_CLAUSE_THREAD_LIMIT_EXPR (clauses);
2714 OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TARGET];
2715 cclauses[C_OMP_CLAUSE_SPLIT_TARGET] = c;
2717 else
2719 s = C_OMP_CLAUSE_SPLIT_TARGET;
2720 break;
2723 s = C_OMP_CLAUSE_SPLIT_TEAMS;
2724 break;
2725 /* Allocate clause is allowed on target, teams, distribute, parallel,
2726 for, sections and taskloop. Distribute it to all. */
2727 case OMP_CLAUSE_ALLOCATE:
2728 s = C_OMP_CLAUSE_SPLIT_COUNT;
2729 for (i = 0; i < C_OMP_CLAUSE_SPLIT_COUNT; i++)
2731 switch (i)
2733 case C_OMP_CLAUSE_SPLIT_TARGET:
2734 if ((mask & (OMP_CLAUSE_MASK_1
2735 << PRAGMA_OMP_CLAUSE_MAP)) == 0)
2736 continue;
2737 break;
2738 case C_OMP_CLAUSE_SPLIT_TEAMS:
2739 if ((mask & (OMP_CLAUSE_MASK_1
2740 << PRAGMA_OMP_CLAUSE_NUM_TEAMS)) == 0)
2741 continue;
2742 break;
2743 case C_OMP_CLAUSE_SPLIT_DISTRIBUTE:
2744 if ((mask & (OMP_CLAUSE_MASK_1
2745 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)) == 0)
2746 continue;
2747 break;
2748 case C_OMP_CLAUSE_SPLIT_PARALLEL:
2749 if ((mask & (OMP_CLAUSE_MASK_1
2750 << PRAGMA_OMP_CLAUSE_NUM_THREADS)) == 0)
2751 continue;
2752 break;
2753 case C_OMP_CLAUSE_SPLIT_FOR:
2754 STATIC_ASSERT (C_OMP_CLAUSE_SPLIT_SECTIONS
2755 == C_OMP_CLAUSE_SPLIT_FOR
2756 && (C_OMP_CLAUSE_SPLIT_TASKLOOP
2757 == C_OMP_CLAUSE_SPLIT_FOR)
2758 && (C_OMP_CLAUSE_SPLIT_LOOP
2759 == C_OMP_CLAUSE_SPLIT_FOR));
2760 if (code == OMP_SECTIONS)
2761 break;
2762 if ((mask & (OMP_CLAUSE_MASK_1
2763 << PRAGMA_OMP_CLAUSE_SCHEDULE)) != 0)
2764 break;
2765 if ((mask & (OMP_CLAUSE_MASK_1
2766 << PRAGMA_OMP_CLAUSE_NOGROUP)) != 0)
2767 break;
2768 continue;
2769 case C_OMP_CLAUSE_SPLIT_SIMD:
2770 continue;
2771 default:
2772 gcc_unreachable ();
2774 if (s != C_OMP_CLAUSE_SPLIT_COUNT)
2776 c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2777 OMP_CLAUSE_ALLOCATE);
2778 OMP_CLAUSE_DECL (c)
2779 = OMP_CLAUSE_DECL (clauses);
2780 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
2781 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (clauses);
2782 OMP_CLAUSE_ALLOCATE_ALIGN (c)
2783 = OMP_CLAUSE_ALLOCATE_ALIGN (clauses);
2784 OMP_CLAUSE_CHAIN (c) = cclauses[s];
2785 cclauses[s] = c;
2786 has_dup_allocate = true;
2788 s = (enum c_omp_clause_split) i;
2790 gcc_assert (s != C_OMP_CLAUSE_SPLIT_COUNT);
2791 break;
2792 default:
2793 gcc_unreachable ();
2795 OMP_CLAUSE_CHAIN (clauses) = cclauses[s];
2796 cclauses[s] = clauses;
2799 if (has_dup_allocate)
2801 bool need_prune = false;
2802 bitmap_obstack_initialize (NULL);
2803 for (i = 0; i < C_OMP_CLAUSE_SPLIT_SIMD - (code == OMP_LOOP); i++)
2804 if (cclauses[i])
2806 bitmap_head allocate_head;
2807 bitmap_initialize (&allocate_head, &bitmap_default_obstack);
2808 for (c = cclauses[i]; c; c = OMP_CLAUSE_CHAIN (c))
2809 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
2810 && DECL_P (OMP_CLAUSE_DECL (c)))
2811 bitmap_set_bit (&allocate_head,
2812 DECL_UID (OMP_CLAUSE_DECL (c)));
2813 for (c = cclauses[i]; c; c = OMP_CLAUSE_CHAIN (c))
2814 switch (OMP_CLAUSE_CODE (c))
2816 case OMP_CLAUSE_REDUCTION:
2817 case OMP_CLAUSE_IN_REDUCTION:
2818 case OMP_CLAUSE_TASK_REDUCTION:
2819 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
2821 tree t = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
2822 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
2823 t = TREE_OPERAND (t, 0);
2824 if (TREE_CODE (t) == ADDR_EXPR
2825 || INDIRECT_REF_P (t))
2826 t = TREE_OPERAND (t, 0);
2827 if (DECL_P (t))
2828 bitmap_clear_bit (&allocate_head, DECL_UID (t));
2829 break;
2831 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == TREE_LIST)
2833 /* TODO: This can go away once we transition all uses of
2834 TREE_LIST for representing OMP array sections to
2835 OMP_ARRAY_SECTION. */
2836 tree t;
2837 for (t = OMP_CLAUSE_DECL (c);
2838 TREE_CODE (t) == TREE_LIST; t = TREE_CHAIN (t))
2840 if (DECL_P (t))
2841 bitmap_clear_bit (&allocate_head, DECL_UID (t));
2842 break;
2844 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == OMP_ARRAY_SECTION)
2846 tree t;
2847 for (t = OMP_CLAUSE_DECL (c);
2848 TREE_CODE (t) == OMP_ARRAY_SECTION;
2849 t = TREE_OPERAND (t, 0))
2851 if (DECL_P (t))
2852 bitmap_clear_bit (&allocate_head, DECL_UID (t));
2853 break;
2855 /* FALLTHRU */
2856 case OMP_CLAUSE_PRIVATE:
2857 case OMP_CLAUSE_FIRSTPRIVATE:
2858 case OMP_CLAUSE_LASTPRIVATE:
2859 case OMP_CLAUSE_LINEAR:
2860 if (DECL_P (OMP_CLAUSE_DECL (c)))
2861 bitmap_clear_bit (&allocate_head,
2862 DECL_UID (OMP_CLAUSE_DECL (c)));
2863 break;
2864 default:
2865 break;
2867 for (c = cclauses[i]; c; c = OMP_CLAUSE_CHAIN (c))
2868 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
2869 && DECL_P (OMP_CLAUSE_DECL (c))
2870 && bitmap_bit_p (&allocate_head,
2871 DECL_UID (OMP_CLAUSE_DECL (c))))
2873 /* Mark allocate clauses which don't have corresponding
2874 explicit data sharing clause. */
2875 OMP_CLAUSE_ALLOCATE_COMBINED (c) = 1;
2876 need_prune = true;
2879 bitmap_obstack_release (NULL);
2880 if (need_prune)
2882 /* At least one allocate clause has been marked. Walk all the
2883 duplicated allocate clauses in sync. If it is marked in all
2884 constituent constructs, diagnose it as invalid and remove
2885 them. Otherwise, remove all marked inner clauses inside
2886 a construct that doesn't have them marked. Keep the outer
2887 marked ones, because some clause duplication is done only
2888 during gimplification. */
2889 tree *p[C_OMP_CLAUSE_SPLIT_COUNT];
2890 for (i = 0; i < C_OMP_CLAUSE_SPLIT_COUNT; i++)
2891 if (cclauses[i] == NULL_TREE
2892 || i == C_OMP_CLAUSE_SPLIT_SIMD
2893 || (i == C_OMP_CLAUSE_SPLIT_LOOP && code == OMP_LOOP))
2894 p[i] = NULL;
2895 else
2896 p[i] = &cclauses[i];
2899 int j = -1;
2900 tree seen = NULL_TREE;
2901 for (i = C_OMP_CLAUSE_SPLIT_COUNT - 1; i >= 0; i--)
2902 if (p[i])
2904 while (*p[i]
2905 && OMP_CLAUSE_CODE (*p[i]) != OMP_CLAUSE_ALLOCATE)
2906 p[i] = &OMP_CLAUSE_CHAIN (*p[i]);
2907 if (*p[i] == NULL_TREE)
2909 i = C_OMP_CLAUSE_SPLIT_COUNT;
2910 break;
2912 if (!OMP_CLAUSE_ALLOCATE_COMBINED (*p[i]) && j == -1)
2913 j = i;
2914 seen = *p[i];
2916 if (i == C_OMP_CLAUSE_SPLIT_COUNT)
2917 break;
2918 if (j == -1)
2919 error_at (OMP_CLAUSE_LOCATION (seen),
2920 "%qD specified in %<allocate%> clause but not in "
2921 "an explicit privatization clause",
2922 OMP_CLAUSE_DECL (seen));
2923 for (i = 0; i < C_OMP_CLAUSE_SPLIT_COUNT; i++)
2924 if (p[i])
2926 if (i > j)
2927 /* Remove. */
2928 *p[i] = OMP_CLAUSE_CHAIN (*p[i]);
2929 else
2930 /* Keep. */
2931 p[i] = &OMP_CLAUSE_CHAIN (*p[i]);
2934 while (1);
2938 if (!flag_checking)
2939 return;
2941 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP)) == 0)
2942 gcc_assert (cclauses[C_OMP_CLAUSE_SPLIT_TARGET] == NULL_TREE);
2943 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_TEAMS)) == 0)
2944 gcc_assert (cclauses[C_OMP_CLAUSE_SPLIT_TEAMS] == NULL_TREE);
2945 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)) == 0
2946 && (mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_FILTER)) == 0)
2947 gcc_assert (cclauses[C_OMP_CLAUSE_SPLIT_DISTRIBUTE] == NULL_TREE);
2948 if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_THREADS)) == 0)
2949 gcc_assert (cclauses[C_OMP_CLAUSE_SPLIT_PARALLEL] == NULL_TREE);
2950 if ((mask & ((OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_SCHEDULE)
2951 | (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP))) == 0
2952 && code != OMP_SECTIONS
2953 && code != OMP_LOOP)
2954 gcc_assert (cclauses[C_OMP_CLAUSE_SPLIT_FOR] == NULL_TREE);
2955 if (code != OMP_SIMD)
2956 gcc_assert (cclauses[C_OMP_CLAUSE_SPLIT_SIMD] == NULL_TREE);
2960 /* qsort callback to compare #pragma omp declare simd clauses. */
2962 static int
2963 c_omp_declare_simd_clause_cmp (const void *p, const void *q)
2965 tree a = *(const tree *) p;
2966 tree b = *(const tree *) q;
2967 if (OMP_CLAUSE_CODE (a) != OMP_CLAUSE_CODE (b))
2969 if (OMP_CLAUSE_CODE (a) > OMP_CLAUSE_CODE (b))
2970 return -1;
2971 return 1;
2973 if (OMP_CLAUSE_CODE (a) != OMP_CLAUSE_SIMDLEN
2974 && OMP_CLAUSE_CODE (a) != OMP_CLAUSE_INBRANCH
2975 && OMP_CLAUSE_CODE (a) != OMP_CLAUSE_NOTINBRANCH)
2977 int c = tree_to_shwi (OMP_CLAUSE_DECL (a));
2978 int d = tree_to_shwi (OMP_CLAUSE_DECL (b));
2979 if (c < d)
2980 return 1;
2981 if (c > d)
2982 return -1;
2984 return 0;
/* Change PARM_DECLs in OMP_CLAUSE_DECL of #pragma omp declare simd
   CLAUSES on FNDECL into argument indexes and sort them.

   PARMS is the PARM_DECL chain the clauses refer to; CLAUSES is the
   clause chain to renumber.  Returns the renumbered, sorted clause
   chain.  A clause naming something that is not in PARMS is diagnosed
   and dropped from the returned chain.  */

tree
c_omp_declare_simd_clauses_to_numbers (tree parms, tree clauses)
{
  tree c;
  vec<tree> clvec = vNULL;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      /* simdlen/inbranch/notinbranch refer to no parameter, so there is
	 nothing to renumber for them; every other clause here names a
	 parameter in OMP_CLAUSE_DECL.  */
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_SIMDLEN
	  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_INBRANCH
	  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_NOTINBRANCH)
	{
	  tree decl = OMP_CLAUSE_DECL (c);
	  tree arg;
	  int idx;
	  /* Find the position of DECL in the parameter chain.  */
	  for (arg = parms, idx = 0; arg;
	       arg = TREE_CHAIN (arg), idx++)
	    if (arg == decl)
	      break;
	  if (arg == NULL_TREE)
	    {
	      /* Not a parameter: diagnose and drop this clause (it is
		 never pushed into CLVEC below).  */
	      error_at (OMP_CLAUSE_LOCATION (c),
			"%qD is not a function argument", decl);
	      continue;
	    }
	  OMP_CLAUSE_DECL (c) = build_int_cst (integer_type_node, idx);
	  /* A linear clause with a variable stride names a second
	     parameter as its step; renumber that one too.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (c))
	    {
	      decl = OMP_CLAUSE_LINEAR_STEP (c);
	      for (arg = parms, idx = 0; arg;
		   arg = TREE_CHAIN (arg), idx++)
		if (arg == decl)
		  break;
	      if (arg == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%qD is not a function argument", decl);
		  continue;
		}
	      OMP_CLAUSE_LINEAR_STEP (c)
		= build_int_cst (integer_type_node, idx);
	    }
	}
      clvec.safe_push (c);
    }
  if (!clvec.is_empty ())
    {
      /* Sort the surviving clauses and rebuild the OMP_CLAUSE_CHAIN
	 links to match the sorted order.  */
      unsigned int len = clvec.length (), i;
      clvec.qsort (c_omp_declare_simd_clause_cmp);
      clauses = clvec[0];
      for (i = 0; i < len; i++)
	OMP_CLAUSE_CHAIN (clvec[i]) = (i < len - 1) ? clvec[i + 1] : NULL_TREE;
    }
  else
    clauses = NULL_TREE;
  clvec.release ();
  return clauses;
}
3050 /* Change argument indexes in CLAUSES of FNDECL back to PARM_DECLs. */
3052 void
3053 c_omp_declare_simd_clauses_to_decls (tree fndecl, tree clauses)
3055 tree c;
3057 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
3058 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_SIMDLEN
3059 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_INBRANCH
3060 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_NOTINBRANCH)
3062 int idx = tree_to_shwi (OMP_CLAUSE_DECL (c)), i;
3063 tree arg;
3064 for (arg = DECL_ARGUMENTS (fndecl), i = 0; arg;
3065 arg = TREE_CHAIN (arg), i++)
3066 if (i == idx)
3067 break;
3068 gcc_assert (arg);
3069 OMP_CLAUSE_DECL (c) = arg;
3070 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
3071 && OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (c))
3073 idx = tree_to_shwi (OMP_CLAUSE_LINEAR_STEP (c));
3074 for (arg = DECL_ARGUMENTS (fndecl), i = 0; arg;
3075 arg = TREE_CHAIN (arg), i++)
3076 if (i == idx)
3077 break;
3078 gcc_assert (arg);
3079 OMP_CLAUSE_LINEAR_STEP (c) = arg;
/* Return true for __func__ and similar function-local predefined
   variables (which are in OpenMP predetermined shared, allowed in
   shared/firstprivate clauses).  */

bool
c_omp_predefined_variable (tree decl)
{
  /* Every variable recognized here is an artificial, static, named
     VAR_DECL.  */
  if (VAR_P (decl)
      && DECL_ARTIFICIAL (decl)
      && TREE_STATIC (decl)
      && DECL_NAME (decl))
    {
      /* __func__, __FUNCTION__ and __PRETTY_FUNCTION__ are read-only
	 and carry one of the reserved-id spellings as their name.  */
      if (TREE_READONLY (decl)
	  && (DECL_NAME (decl) == ridpointers[RID_C99_FUNCTION_NAME]
	      || DECL_NAME (decl) == ridpointers[RID_FUNCTION_NAME]
	      || DECL_NAME (decl) == ridpointers[RID_PRETTY_FUNCTION_NAME]))
	return true;
      /* For UBSan handle the same also ubsan_create_data created
	 variables.  There is no magic flag for those, but user variables
	 shouldn't be DECL_ARTIFICIAL or have TYPE_ARTIFICIAL type with
	 such names.  */
      if ((flag_sanitize & (SANITIZE_UNDEFINED
			    | SANITIZE_UNDEFINED_NONDEFAULT)) != 0
	  && DECL_IGNORED_P (decl)
	  && !TREE_READONLY (decl)
	  && TREE_CODE (DECL_NAME (decl)) == IDENTIFIER_NODE
	  && TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
	  && TYPE_ARTIFICIAL (TREE_TYPE (decl))
	  && TYPE_NAME (TREE_TYPE (decl))
	  && TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
	  && DECL_NAME (TYPE_NAME (TREE_TYPE (decl)))
	  && (TREE_CODE (DECL_NAME (TYPE_NAME (TREE_TYPE (decl))))
	      == IDENTIFIER_NODE))
	{
	  tree id1 = DECL_NAME (decl);
	  tree id2 = DECL_NAME (TYPE_NAME (TREE_TYPE (decl)));
	  /* Accept a variable whose name contains "ubsan_data" and whose
	     RECORD_TYPE name starts with "__ubsan_" and ends with
	     "_data".  The length checks keep the memcmp offsets inside
	     the identifier string.  */
	  if (IDENTIFIER_LENGTH (id1) >= sizeof ("ubsan_data") - 1
	      && IDENTIFIER_LENGTH (id2) >= sizeof ("__ubsan__data")
	      && !memcmp (IDENTIFIER_POINTER (id2), "__ubsan_",
			  sizeof ("__ubsan_") - 1)
	      && !memcmp (IDENTIFIER_POINTER (id2) + IDENTIFIER_LENGTH (id2)
			  - sizeof ("_data") + 1, "_data",
			  sizeof ("_data") - 1)
	      && strstr (IDENTIFIER_POINTER (id1), "ubsan_data"))
	    return true;
	}
    }
  return false;
}
3134 /* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute of DECL
3135 is predetermined. */
3137 enum omp_clause_default_kind
3138 c_omp_predetermined_sharing (tree decl)
3140 /* Predetermine artificial variables holding integral values, those
3141 are usually result of gimplify_one_sizepos or SAVE_EXPR
3142 gimplification. */
3143 if (VAR_P (decl)
3144 && DECL_ARTIFICIAL (decl)
3145 && INTEGRAL_TYPE_P (TREE_TYPE (decl)))
3146 return OMP_CLAUSE_DEFAULT_SHARED;
3148 if (c_omp_predefined_variable (decl))
3149 return OMP_CLAUSE_DEFAULT_SHARED;
3151 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
3154 /* OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED unless OpenMP mapping attribute
3155 of DECL is predetermined. */
3157 enum omp_clause_defaultmap_kind
3158 c_omp_predetermined_mapping (tree decl)
3160 /* Predetermine artificial variables holding integral values, those
3161 are usually result of gimplify_one_sizepos or SAVE_EXPR
3162 gimplification. */
3163 if (VAR_P (decl)
3164 && DECL_ARTIFICIAL (decl)
3165 && INTEGRAL_TYPE_P (TREE_TYPE (decl)))
3166 return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;
3168 if (c_omp_predefined_variable (decl))
3169 return OMP_CLAUSE_DEFAULTMAP_TO;
3171 return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
3175 /* Used to merge map clause information in c_omp_adjust_map_clauses. */
3176 struct map_clause
3178 tree clause;
3179 bool firstprivate_ptr_p;
3180 bool decl_mapped;
3181 bool omp_declare_target;
3182 map_clause (void) : clause (NULL_TREE), firstprivate_ptr_p (false),
3183 decl_mapped (false), omp_declare_target (false) { }
/* Adjust map clauses after normal clause parsing, mainly to mark specific
   base-pointer map cases addressable that may be turned into attach/detach
   operations during gimplification.  CLAUSES is the clause chain to scan;
   IS_TARGET says whether the enclosing construct is a target construct.  */
void
c_omp_adjust_map_clauses (tree clauses, bool is_target)
{
  if (!is_target)
    {
      /* If this is not a target construct, just turn firstprivate pointers
	 into attach/detach, the runtime will check and do the rest.  */
      for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	    && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
	    && DECL_P (OMP_CLAUSE_DECL (c))
	    && POINTER_TYPE_P (TREE_TYPE (OMP_CLAUSE_DECL (c))))
	  {
	    tree ptr = OMP_CLAUSE_DECL (c);
	    c_common_mark_addressable_vec (ptr);
	  }
      return;
    }

  /* Keyed by the mapped decl; records how each decl is used across the
     whole clause chain so the decision below can combine information from
     several clauses.  */
  hash_map<tree, map_clause> maps;

  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	&& DECL_P (OMP_CLAUSE_DECL (c)))
      {
	/* If this is for a target construct, the firstprivate pointer
	   is marked addressable if either is true:
	   (1) the base-pointer is mapped in this same construct, or
	   (2) the base-pointer is a variable place on the device by
	       "declare target" directives.

	   Here we iterate through all map clauses collecting these cases,
	   and merge them with a hash_map to process below.  */

	if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
	    && POINTER_TYPE_P (TREE_TYPE (OMP_CLAUSE_DECL (c))))
	  {
	    tree ptr = OMP_CLAUSE_DECL (c);
	    map_clause &mc = maps.get_or_insert (ptr);
	    /* Remember only the first firstprivate-pointer clause; it is
	       the one whose decl is marked addressable below.  */
	    if (mc.clause == NULL_TREE)
	      mc.clause = c;
	    mc.firstprivate_ptr_p = true;

	    if (is_global_var (ptr)
		&& lookup_attribute ("omp declare target",
				     DECL_ATTRIBUTES (ptr)))
	      mc.omp_declare_target = true;
	  }
	else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALLOC
		 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO
		 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FROM
		 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TOFROM
		 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
		 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_FROM
		 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
	  {
	    /* The same decl is also mapped for real data movement or
	       allocation: case (1) above.  */
	    map_clause &mc = maps.get_or_insert (OMP_CLAUSE_DECL (c));
	    mc.decl_mapped = true;
	  }
      }

  for (hash_map<tree, map_clause>::iterator i = maps.begin ();
       i != maps.end (); ++i)
    {
      map_clause &mc = (*i).second;

      if (mc.firstprivate_ptr_p
	  && (mc.decl_mapped || mc.omp_declare_target))
	c_common_mark_addressable_vec (OMP_CLAUSE_DECL (mc.clause));
    }
}
3262 /* Maybe strip off an indirection from a "converted" reference, then find the
3263 origin of a pointer (i.e. without any offset). */
3265 tree
3266 c_omp_address_inspector::unconverted_ref_origin ()
3268 tree t = orig;
3270 /* We may have a reference-typed component access at the outermost level
3271 that has had convert_from_reference called on it. Get the un-dereferenced
3272 reference itself. */
3273 t = maybe_unconvert_ref (t);
3275 /* Find base pointer for POINTER_PLUS_EXPR, etc. */
3276 t = get_origin (t);
3278 return t;
3281 /* Return TRUE if the address is a component access. */
3283 bool
3284 c_omp_address_inspector::component_access_p ()
3286 tree t = maybe_unconvert_ref (orig);
3288 t = get_origin (t);
3290 return TREE_CODE (t) == COMPONENT_REF;
3293 /* Perform various checks on the address, as described by clause CLAUSE (we
3294 only use its code and location here). */
3296 bool
3297 c_omp_address_inspector::check_clause (tree clause)
3299 tree t = unconverted_ref_origin ();
3301 if (TREE_CODE (t) != COMPONENT_REF)
3302 return true;
3304 if (TREE_CODE (TREE_OPERAND (t, 1)) == FIELD_DECL
3305 && DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
3307 error_at (OMP_CLAUSE_LOCATION (clause),
3308 "bit-field %qE in %qs clause",
3309 t, omp_clause_code_name[OMP_CLAUSE_CODE (clause)]);
3310 return false;
3312 else if (!processing_template_decl_p ()
3313 && !omp_mappable_type (TREE_TYPE (t)))
3315 error_at (OMP_CLAUSE_LOCATION (clause),
3316 "%qE does not have a mappable type in %qs clause",
3317 t, omp_clause_code_name[OMP_CLAUSE_CODE (clause)]);
3318 emit_unmappable_type_notes (TREE_TYPE (t));
3319 return false;
3321 else if (TREE_TYPE (t) && TYPE_ATOMIC (TREE_TYPE (t)))
3323 error_at (OMP_CLAUSE_LOCATION (clause),
3324 "%<_Atomic%> %qE in %qs clause", t,
3325 omp_clause_code_name[OMP_CLAUSE_CODE (clause)]);
3326 return false;
3329 return true;
/* Find the "root term" for the address.  This is the innermost decl, etc.
   of the access.  When CHECKING is true, diagnose accesses through union
   members and return error_mark_node for them; the cached result is then
   bypassed so the check is always performed.  Also sets the
   "indirections" flag when the walk passes through a MEM_REF or
   INDIRECT_REF.  */

tree
c_omp_address_inspector::get_root_term (bool checking)
{
  /* Use the cached result, except when checking: a cached value would skip
     the union diagnostic below.  */
  if (root_term && !checking)
    return root_term;

  tree t = unconverted_ref_origin ();

  while (TREE_CODE (t) == COMPONENT_REF)
    {
      /* Accesses to union members are not permitted here; TREE_TYPE may be
	 unset while processing a template, hence the extra guard.  */
      if (checking
	  && TREE_TYPE (TREE_OPERAND (t, 0))
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0))) == UNION_TYPE)
	{
	  error_at (loc, "%qE is a member of a union", t);
	  return error_mark_node;
	}
      t = TREE_OPERAND (t, 0);
      /* Strip array indexing and pointer dereferences between component
	 accesses, recording whether any real indirection was crossed.  */
      while (TREE_CODE (t) == MEM_REF
	     || TREE_CODE (t) == INDIRECT_REF
	     || TREE_CODE (t) == ARRAY_REF)
	{
	  if (TREE_CODE (t) == MEM_REF
	      || TREE_CODE (t) == INDIRECT_REF)
	    indirections = true;
	  t = TREE_OPERAND (t, 0);
	  STRIP_NOPS (t);
	  /* A dereference of "ptr + offset" roots at "ptr".  */
	  if (TREE_CODE (t) == POINTER_PLUS_EXPR)
	    t = TREE_OPERAND (t, 0);
	}
    }

  root_term = t;

  return t;
}
3372 /* Return TRUE if the address is supported in mapping clauses. At present,
3373 this means that the innermost expression is a DECL_P, but could be extended
3374 to other types of expression in the future. */
3376 bool
3377 c_omp_address_inspector::map_supported_p ()
3379 /* If we've already decided if the mapped address is supported, return
3380 that. */
3381 if (map_supported != -1)
3382 return map_supported;
3384 tree t = unconverted_ref_origin ();
3386 STRIP_NOPS (t);
3388 while (TREE_CODE (t) == INDIRECT_REF
3389 || TREE_CODE (t) == MEM_REF
3390 || TREE_CODE (t) == ARRAY_REF
3391 || TREE_CODE (t) == COMPONENT_REF
3392 || TREE_CODE (t) == COMPOUND_EXPR
3393 || TREE_CODE (t) == SAVE_EXPR
3394 || TREE_CODE (t) == POINTER_PLUS_EXPR
3395 || TREE_CODE (t) == NON_LVALUE_EXPR
3396 || TREE_CODE (t) == OMP_ARRAY_SECTION
3397 || TREE_CODE (t) == NOP_EXPR)
3398 if (TREE_CODE (t) == COMPOUND_EXPR)
3399 t = TREE_OPERAND (t, 1);
3400 else
3401 t = TREE_OPERAND (t, 0);
3403 STRIP_NOPS (t);
3405 map_supported = DECL_P (t);
3407 return map_supported;
/* Get the origin of an address T, stripping off offsets and some other
   bits: POINTER_PLUS_EXPR offsets, SAVE_EXPR wrappers, the value side of
   COMPOUND_EXPRs, and indirections through reference-typed operands.  */

tree
c_omp_address_inspector::get_origin (tree t)
{
  while (1)
    {
      /* For a comma expression the address is its last operand.  */
      if (TREE_CODE (t) == COMPOUND_EXPR)
	{
	  t = TREE_OPERAND (t, 1);
	  STRIP_NOPS (t);
	}
      else if (TREE_CODE (t) == POINTER_PLUS_EXPR
	       || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);
      /* Strip a reference dereference -- but only outside template
	 processing, where TREE_TYPE might not be populated yet.  */
      else if (!processing_template_decl_p ()
	       && TREE_CODE (t) == INDIRECT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
		   == REFERENCE_TYPE))
	t = TREE_OPERAND (t, 0);
      else
	break;
    }
  STRIP_NOPS (t);
  return t;
}
3437 /* For an address T that might be a reference that has had
3438 "convert_from_reference" called on it, return the actual reference without
3439 any indirection. */
3441 tree
3442 c_omp_address_inspector::maybe_unconvert_ref (tree t)
3444 /* Be careful not to dereference the type if we're processing a
3445 template decl, else it might be NULL. */
3446 if (!processing_template_decl_p ()
3447 && TREE_CODE (t) == INDIRECT_REF
3448 && TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0))) == REFERENCE_TYPE)
3449 return TREE_OPERAND (t, 0);
3451 return t;
3454 /* Return TRUE if CLAUSE might describe a zero-length array section. */
3456 bool
3457 c_omp_address_inspector::maybe_zero_length_array_section (tree clause)
3459 switch (OMP_CLAUSE_MAP_KIND (clause))
3461 case GOMP_MAP_ALLOC:
3462 case GOMP_MAP_IF_PRESENT:
3463 case GOMP_MAP_TO:
3464 case GOMP_MAP_FROM:
3465 case GOMP_MAP_TOFROM:
3466 case GOMP_MAP_ALWAYS_TO:
3467 case GOMP_MAP_ALWAYS_FROM:
3468 case GOMP_MAP_ALWAYS_TOFROM:
3469 case GOMP_MAP_PRESENT_ALLOC:
3470 case GOMP_MAP_PRESENT_TO:
3471 case GOMP_MAP_PRESENT_FROM:
3472 case GOMP_MAP_PRESENT_TOFROM:
3473 case GOMP_MAP_ALWAYS_PRESENT_TO:
3474 case GOMP_MAP_ALWAYS_PRESENT_FROM:
3475 case GOMP_MAP_ALWAYS_PRESENT_TOFROM:
3476 case GOMP_MAP_RELEASE:
3477 case GOMP_MAP_DELETE:
3478 case GOMP_MAP_FORCE_TO:
3479 case GOMP_MAP_FORCE_FROM:
3480 case GOMP_MAP_FORCE_TOFROM:
3481 case GOMP_MAP_FORCE_PRESENT:
3482 return true;
3483 default:
3484 return false;
/* Expand a chained access.  We only expect to see a quite limited range of
   expression types here, because e.g. you can't have an array of
   references.  C is the clause to expand after; EXPR the full expression;
   ADDR_TOKENS the tokenized address with *IDX the current token; ORT the
   region type.  Returns the last clause of the expansion, or
   error_mark_node for an unsupported access kind.  */

static tree
omp_expand_access_chain (tree c, tree expr, vec<omp_addr_token *> &addr_tokens,
			 unsigned *idx, c_omp_region_type ort)
{
  using namespace omp_addr_tokenizer;
  location_t loc = OMP_CLAUSE_LOCATION (c);
  unsigned i = *idx;
  tree c2 = NULL_TREE;
  gomp_map_kind kind;

  /* Data-removal contexts (exit data, "from"-flavoured maps, delete,
     release) detach the pointer; everything else attaches it.  */
  if ((ort & C_ORT_EXIT_DATA) != 0
      || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	  && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FROM
	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DELETE
	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_RELEASE
	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_FROM
	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FORCE_FROM
	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_PRESENT_FROM
	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_PRESENT_FROM)))
    kind = GOMP_MAP_DETACH;
  else
    kind = GOMP_MAP_ATTACH;

  switch (addr_tokens[i]->u.access_kind)
    {
    case ACCESS_POINTER:
    case ACCESS_POINTER_OFFSET:
      {
	/* Build an attach/detach clause whose size is the byte offset of
	   the accessed address from the pointer itself ("virtual
	   origin").  */
	tree virtual_origin
	  = fold_convert_loc (loc, ptrdiff_type_node, addr_tokens[i]->expr);
	tree data_addr = omp_accessed_addr (addr_tokens, i, expr);
	c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
	OMP_CLAUSE_SET_MAP_KIND (c2, kind);
	OMP_CLAUSE_DECL (c2) = addr_tokens[i]->expr;
	OMP_CLAUSE_SIZE (c2)
	  = fold_build2_loc (loc, MINUS_EXPR, ptrdiff_type_node,
			     fold_convert_loc (loc, ptrdiff_type_node,
					       data_addr),
			     virtual_origin);
      }
      break;

    case ACCESS_INDEXED_ARRAY:
      /* Plain array indexing needs no extra clause.  */
      break;

    default:
      return error_mark_node;
    }

  /* Splice the new clause into the chain immediately after C.  */
  if (c2)
    {
      OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (c);
      OMP_CLAUSE_CHAIN (c) = c2;
      c = c2;
    }

  *idx = ++i;

  /* Recurse for any further access-method tokens in the chain.  */
  if (i < addr_tokens.length ()
      && addr_tokens[i]->type == ACCESS_METHOD)
    return omp_expand_access_chain (c, expr, addr_tokens, idx, ort);

  return c;
}
/* Translate "array_base_decl access_method" to OMP mapping clauses.  C is
   the original clause; ADDR_TOKENS the tokenized address; EXPR the full
   expression; *IDX the current token index (updated on return); ORT the
   region type.  Returns the last clause of the expanded group, or
   error_mark_node for an unsupported access kind.  */

tree
c_omp_address_inspector::expand_array_base (tree c,
					    vec<omp_addr_token *> &addr_tokens,
					    tree expr, unsigned *idx,
					    c_omp_region_type ort)
{
  using namespace omp_addr_tokenizer;
  location_t loc = OMP_CLAUSE_LOCATION (c);
  int i = *idx;
  tree decl = addr_tokens[i + 1]->expr;
  bool decl_p = DECL_P (decl);
  bool declare_target_p = (decl_p
			   && is_global_var (decl)
			   && lookup_attribute ("omp declare target",
						DECL_ATTRIBUTES (decl)));
  bool map_p = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP;
  bool implicit_p = map_p && OMP_CLAUSE_MAP_IMPLICIT (c);
  bool chain_p = omp_access_chain_p (addr_tokens, i + 1);
  /* C2/C3 are extra clauses created here and spliced in after C.  */
  tree c2 = NULL_TREE, c3 = NULL_TREE;
  unsigned consume_tokens = 2;
  bool target_p = (ort & C_ORT_TARGET) != 0;
  bool openmp_p = (ort & C_ORT_OMP) != 0;

  /* An array base is only expected at the start of the token list.  */
  gcc_assert (i == 0);

  /* For OpenACC attach/detach maps, nothing needs expanding.  */
  if (!openmp_p
      && map_p
      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
	  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH))
    {
      i += 2;
      *idx = i;
      return c;
    }

  switch (addr_tokens[i + 1]->u.access_kind)
    {
    case ACCESS_DIRECT:
      if (decl_p && !target_p)
	c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
      break;

    case ACCESS_REF:
      {
	/* Copy the referenced object.  Note that we do this even for !MAP_P
	   clauses.  */
	tree obj = convert_from_reference (addr_tokens[i + 1]->expr);
	if (TREE_CODE (TREE_TYPE (obj)) == ARRAY_TYPE)
	  /* We have a ref to array: add a [0] element as the ME expects.  */
	  OMP_CLAUSE_DECL (c) = build_array_ref (loc, obj, integer_zero_node);
	else
	  OMP_CLAUSE_DECL (c) = obj;
	OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (obj));

	if (!map_p)
	  {
	    if (decl_p)
	      c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
	    break;
	  }

	if (!target_p)
	  break;

	/* If we have a reference to a pointer, avoid using
	   FIRSTPRIVATE_REFERENCE here in case the pointer is modified in the
	   offload region (we can only do that if the pointer does not point
	   to a mapped block).  We could avoid doing this if we don't have a
	   FROM mapping...  */
	bool ref_to_ptr = TREE_CODE (TREE_TYPE (obj)) == POINTER_TYPE;

	c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
	if (!ref_to_ptr
	    && !declare_target_p
	    && decl_p)
	  OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
	else
	  {
	    OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ATTACH_DETACH);
	    if (decl_p)
	      c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
	  }
	OMP_CLAUSE_DECL (c2) = addr_tokens[i + 1]->expr;
	OMP_CLAUSE_SIZE (c2) = size_zero_node;

	/* For a reference to a pointer, allocate space for the reference
	   itself (C2) and attach/detach the pointer through it (C3).  */
	if (ref_to_ptr)
	  {
	    c3 = c2;
	    c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
	    OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALLOC);
	    OMP_CLAUSE_DECL (c2) = addr_tokens[i + 1]->expr;
	    OMP_CLAUSE_SIZE (c2)
	      = TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (c2)));
	  }
      }
      break;

    case ACCESS_INDEXED_REF_TO_ARRAY:
      {
	if (!map_p)
	  {
	    if (decl_p)
	      c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
	    break;
	  }

	if (!target_p)
	  break;

	/* Size of the new clause is the offset of the accessed address
	   from the start of the array ("virtual origin").  */
	tree virtual_origin
	  = convert_from_reference (addr_tokens[i + 1]->expr);
	virtual_origin = build_fold_addr_expr (virtual_origin);
	virtual_origin = fold_convert_loc (loc, ptrdiff_type_node,
					   virtual_origin);
	tree data_addr = omp_accessed_addr (addr_tokens, i + 1, expr);
	c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
	if (decl_p && target_p && !declare_target_p)
	  {
	    /* It appears that omp-low.cc mishandles cases where we have a
	       [reference to an] array of pointers such as:

		 int *arr[N];  (or "int *(&arr)[N] = ...")
		 #pragma omp target map(arr[a][b:c])
		 { ... }

	       in such cases chain_p will be true.  For now, fall back to
	       GOMP_MAP_POINTER.  */
	    enum gomp_map_kind k = chain_p ? GOMP_MAP_POINTER
					   : GOMP_MAP_FIRSTPRIVATE_REFERENCE;
	    OMP_CLAUSE_SET_MAP_KIND (c2, k);
	  }
	else
	  {
	    if (decl_p)
	      c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
	    OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ATTACH_DETACH);
	  }
	OMP_CLAUSE_DECL (c2) = addr_tokens[i + 1]->expr;
	OMP_CLAUSE_SIZE (c2)
	  = fold_build2_loc (loc, MINUS_EXPR, ptrdiff_type_node,
			     fold_convert_loc (loc, ptrdiff_type_node,
					       data_addr),
			     virtual_origin);
      }
      break;

    case ACCESS_INDEXED_ARRAY:
      {
	if (!map_p)
	  {
	    if (decl_p)
	      c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
	    break;
	  }

	/* The code handling "firstprivatize_array_bases" in gimplify.cc is
	   relevant here.  What do we need to create for arrays at this
	   stage?  (This condition doesn't feel quite right.  FIXME?)  */
	if (!target_p
	    && (TREE_CODE (TREE_TYPE (addr_tokens[i + 1]->expr))
		== ARRAY_TYPE))
	  break;

	tree virtual_origin
	  = build_fold_addr_expr (addr_tokens[i + 1]->expr);
	virtual_origin = fold_convert_loc (loc, ptrdiff_type_node,
					   virtual_origin);
	tree data_addr = omp_accessed_addr (addr_tokens, i + 1, expr);
	c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
	if (decl_p && target_p)
	  {
	    /* See comment for ACCESS_INDEXED_REF_TO_ARRAY above.  */
	    enum gomp_map_kind k = chain_p ? GOMP_MAP_POINTER
					   : GOMP_MAP_FIRSTPRIVATE_POINTER;
	    OMP_CLAUSE_SET_MAP_KIND (c2, k);
	  }
	else
	  {
	    if (decl_p)
	      c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
	    OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ATTACH_DETACH);
	  }
	OMP_CLAUSE_DECL (c2) = addr_tokens[i + 1]->expr;
	OMP_CLAUSE_SIZE (c2)
	  = fold_build2_loc (loc, MINUS_EXPR, ptrdiff_type_node,
			     fold_convert_loc (loc, ptrdiff_type_node,
					       data_addr),
			     virtual_origin);
      }
      break;

    case ACCESS_POINTER:
    case ACCESS_POINTER_OFFSET:
      {
	if (!map_p)
	  {
	    if (decl_p)
	      c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
	    break;
	  }

	unsigned last_access = i + 1;
	tree virtual_origin;

	/* A following indexed-array access means the base is an array of
	   pointers; base the offset on element [0] of that array and
	   consume the extra token.  */
	if (chain_p
	    && addr_tokens[i + 2]->type == ACCESS_METHOD
	    && addr_tokens[i + 2]->u.access_kind == ACCESS_INDEXED_ARRAY)
	  {
	    /* !!! This seems wrong for ACCESS_POINTER_OFFSET.  */
	    consume_tokens = 3;
	    chain_p = omp_access_chain_p (addr_tokens, i + 2);
	    last_access = i + 2;
	    virtual_origin
	      = build_array_ref (loc, addr_tokens[last_access]->expr,
				 integer_zero_node);
	    virtual_origin = build_fold_addr_expr (virtual_origin);
	    virtual_origin = fold_convert_loc (loc, ptrdiff_type_node,
					       virtual_origin);
	  }
	else
	  virtual_origin = fold_convert_loc (loc, ptrdiff_type_node,
					     addr_tokens[last_access]->expr);
	tree data_addr = omp_accessed_addr (addr_tokens, last_access, expr);
	c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
	/* For OpenACC, use FIRSTPRIVATE_POINTER for decls even on
	   non-compute regions (e.g. "acc data" constructs).  It'll be
	   removed anyway in gimplify.cc, but doing it this way maintains
	   diagnostic behaviour.  */
	if (decl_p && (target_p || !openmp_p) && !chain_p && !declare_target_p)
	  OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FIRSTPRIVATE_POINTER);
	else
	  {
	    if (decl_p)
	      c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
	    OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ATTACH_DETACH);
	  }
	OMP_CLAUSE_DECL (c2) = addr_tokens[i + 1]->expr;
	OMP_CLAUSE_SIZE (c2)
	  = fold_build2_loc (loc, MINUS_EXPR, ptrdiff_type_node,
			     fold_convert_loc (loc, ptrdiff_type_node,
					       data_addr),
			     virtual_origin);
      }
      break;

    case ACCESS_REF_TO_POINTER:
    case ACCESS_REF_TO_POINTER_OFFSET:
      {
	if (!map_p)
	  {
	    if (decl_p)
	      c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
	    break;
	  }

	unsigned last_access = i + 1;
	tree virtual_origin;

	if (chain_p
	    && addr_tokens[i + 2]->type == ACCESS_METHOD
	    && addr_tokens[i + 2]->u.access_kind == ACCESS_INDEXED_ARRAY)
	  {
	    /* !!! This seems wrong for ACCESS_POINTER_OFFSET.  */
	    consume_tokens = 3;
	    chain_p = omp_access_chain_p (addr_tokens, i + 2);
	    last_access = i + 2;
	    virtual_origin
	      = build_array_ref (loc, addr_tokens[last_access]->expr,
				 integer_zero_node);
	    virtual_origin = build_fold_addr_expr (virtual_origin);
	    virtual_origin = fold_convert_loc (loc, ptrdiff_type_node,
					       virtual_origin);
	  }
	else
	  {
	    virtual_origin
	      = convert_from_reference (addr_tokens[last_access]->expr);
	    virtual_origin = fold_convert_loc (loc, ptrdiff_type_node,
					       virtual_origin);
	  }

	tree data_addr = omp_accessed_addr (addr_tokens, last_access, expr);
	c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
	if (decl_p && target_p && !chain_p && !declare_target_p)
	  {
	    OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
	    OMP_CLAUSE_DECL (c2) = addr_tokens[i + 1]->expr;
	  }
	else
	  {
	    if (decl_p)
	      c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
	    OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ATTACH_DETACH);
	    OMP_CLAUSE_DECL (c2)
	      = convert_from_reference (addr_tokens[i + 1]->expr);
	  }
	OMP_CLAUSE_SIZE (c2)
	  = fold_build2_loc (loc, MINUS_EXPR, ptrdiff_type_node,
			     fold_convert_loc (loc, ptrdiff_type_node,
					       data_addr),
			     virtual_origin);
      }
      break;

    default:
      *idx = i + consume_tokens;
      return error_mark_node;
    }

  /* Splice the newly-built clauses (C2, and C3 if present) in after C,
     propagating the implicit flag onto them.  */
  if (c3)
    {
      OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c);
      OMP_CLAUSE_CHAIN (c2) = c3;
      OMP_CLAUSE_CHAIN (c) = c2;
      if (implicit_p)
	{
	  OMP_CLAUSE_MAP_IMPLICIT (c2) = 1;
	  OMP_CLAUSE_MAP_IMPLICIT (c3) = 1;
	}
      c = c3;
    }
  else if (c2)
    {
      OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (c);
      OMP_CLAUSE_CHAIN (c) = c2;
      if (implicit_p)
	OMP_CLAUSE_MAP_IMPLICIT (c2) = 1;
      c = c2;
    }

  i += consume_tokens;
  *idx = i;

  if (chain_p && map_p)
    return omp_expand_access_chain (c, expr, addr_tokens, idx, ort);

  return c;
}
/* Translate "component_selector access_method" to OMP mapping clauses.
   C is the original clause; ADDR_TOKENS the tokenized address; EXPR the
   full expression; *IDX the current token index (updated on return); ORT
   the region type.  Returns the last clause of the expanded group, or
   error_mark_node for an unsupported access kind.  */

tree
c_omp_address_inspector::expand_component_selector (tree c,
						    vec<omp_addr_token *>
						      &addr_tokens,
						    tree expr, unsigned *idx,
						    c_omp_region_type ort)
{
  using namespace omp_addr_tokenizer;
  location_t loc = OMP_CLAUSE_LOCATION (c);
  unsigned i = *idx;
  /* C2/C3 are extra clauses created here and spliced in after C.  */
  tree c2 = NULL_TREE, c3 = NULL_TREE;
  bool chain_p = omp_access_chain_p (addr_tokens, i + 1);
  bool map_p = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP;

  switch (addr_tokens[i + 1]->u.access_kind)
    {
    case ACCESS_DIRECT:
    case ACCESS_INDEXED_ARRAY:
      /* Nothing to expand for direct or plain indexed accesses.  */
      break;

    case ACCESS_REF:
      {
	/* Copy the referenced object.  Note that we also do this for !MAP_P
	   clauses.  */
	tree obj = convert_from_reference (addr_tokens[i + 1]->expr);
	OMP_CLAUSE_DECL (c) = obj;
	OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (obj));

	if (!map_p)
	  break;

	/* Attach/detach the reference itself with zero size.  */
	c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
	OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ATTACH_DETACH);
	OMP_CLAUSE_DECL (c2) = addr_tokens[i + 1]->expr;
	OMP_CLAUSE_SIZE (c2) = size_zero_node;
      }
      break;

    case ACCESS_INDEXED_REF_TO_ARRAY:
      {
	if (!map_p)
	  break;

	/* New clause's size is the offset of the accessed address from the
	   start of the referenced array ("virtual origin").  */
	tree virtual_origin
	  = convert_from_reference (addr_tokens[i + 1]->expr);
	virtual_origin = build_fold_addr_expr (virtual_origin);
	virtual_origin = fold_convert_loc (loc, ptrdiff_type_node,
					   virtual_origin);
	tree data_addr = omp_accessed_addr (addr_tokens, i + 1, expr);

	c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
	OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ATTACH_DETACH);
	OMP_CLAUSE_DECL (c2) = addr_tokens[i + 1]->expr;
	OMP_CLAUSE_SIZE (c2)
	  = fold_build2_loc (loc, MINUS_EXPR, ptrdiff_type_node,
			     fold_convert_loc (loc, ptrdiff_type_node,
					       data_addr),
			     virtual_origin);
      }
      break;

    case ACCESS_POINTER:
    case ACCESS_POINTER_OFFSET:
      {
	if (!map_p)
	  break;

	tree virtual_origin
	  = fold_convert_loc (loc, ptrdiff_type_node,
			      addr_tokens[i + 1]->expr);
	tree data_addr = omp_accessed_addr (addr_tokens, i + 1, expr);

	c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
	OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ATTACH_DETACH);
	OMP_CLAUSE_DECL (c2) = addr_tokens[i + 1]->expr;
	OMP_CLAUSE_SIZE (c2)
	  = fold_build2_loc (loc, MINUS_EXPR, ptrdiff_type_node,
			     fold_convert_loc (loc, ptrdiff_type_node,
					       data_addr),
			     virtual_origin);
      }
      break;

    case ACCESS_REF_TO_POINTER:
    case ACCESS_REF_TO_POINTER_OFFSET:
      {
	if (!map_p)
	  break;

	tree ptr = convert_from_reference (addr_tokens[i + 1]->expr);
	tree virtual_origin = fold_convert_loc (loc, ptrdiff_type_node,
						ptr);
	tree data_addr = omp_accessed_addr (addr_tokens, i + 1, expr);

	/* Attach the pointer...  */
	c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
	OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ATTACH_DETACH);
	OMP_CLAUSE_DECL (c2) = ptr;
	OMP_CLAUSE_SIZE (c2)
	  = fold_build2_loc (loc, MINUS_EXPR, ptrdiff_type_node,
			     fold_convert_loc (loc, ptrdiff_type_node,
					       data_addr),
			     virtual_origin);

	/* ...and also the reference.  */
	c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
	OMP_CLAUSE_SET_MAP_KIND (c3, GOMP_MAP_ATTACH_DETACH);
	OMP_CLAUSE_DECL (c3) = addr_tokens[i + 1]->expr;
	OMP_CLAUSE_SIZE (c3) = size_zero_node;
      }
      break;

    default:
      *idx = i + 2;
      return error_mark_node;
    }

  /* Splice the new clauses into the chain after C, C2 before C3.  */
  if (c3)
    {
      OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c);
      OMP_CLAUSE_CHAIN (c2) = c3;
      OMP_CLAUSE_CHAIN (c) = c2;
      c = c3;
    }
  else if (c2)
    {
      OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (c);
      OMP_CLAUSE_CHAIN (c) = c2;
      c = c2;
    }

  i += 2;
  *idx = i;

  if (chain_p && map_p)
    return omp_expand_access_chain (c, expr, addr_tokens, idx, ort);

  return c;
}
/* Expand a map clause into a group of mapping clauses, creating nodes to
   attach/detach pointers and so forth as necessary.  C is the original
   clause, EXPR the mapped expression, ADDR_TOKENS its tokenized form, ORT
   the region type.  Returns the last clause of the group, or
   error_mark_node if the token sequence is not one of the recognized
   shapes or is not fully consumed.  */

tree
c_omp_address_inspector::expand_map_clause (tree c, tree expr,
					    vec<omp_addr_token *> &addr_tokens,
					    c_omp_region_type ort)
{
  using namespace omp_addr_tokenizer;
  unsigned i, length = addr_tokens.length ();

  /* I is advanced by the handlers (via &i) or the inline cases below.  */
  for (i = 0; i < length;)
    {
      int remaining = length - i;

      if (remaining >= 2
	  && addr_tokens[i]->type == ARRAY_BASE
	  && addr_tokens[i]->u.structure_base_kind == BASE_DECL
	  && addr_tokens[i + 1]->type == ACCESS_METHOD)
	{
	  c = expand_array_base (c, addr_tokens, expr, &i, ort);
	  if (c == error_mark_node)
	    return error_mark_node;
	}
      else if (remaining >= 2
	       && addr_tokens[i]->type == ARRAY_BASE
	       && addr_tokens[i]->u.structure_base_kind == BASE_ARBITRARY_EXPR
	       && addr_tokens[i + 1]->type == ACCESS_METHOD)
	{
	  c = expand_array_base (c, addr_tokens, expr, &i, ort);
	  if (c == error_mark_node)
	    return error_mark_node;
	}
      else if (remaining >= 2
	       && addr_tokens[i]->type == STRUCTURE_BASE
	       && addr_tokens[i]->u.structure_base_kind == BASE_DECL
	       && addr_tokens[i + 1]->type == ACCESS_METHOD)
	{
	  if (addr_tokens[i + 1]->u.access_kind == ACCESS_DIRECT)
	    c_common_mark_addressable_vec (addr_tokens[i + 1]->expr);
	  /* Skip the base and any run of access-method tokens.
	     NOTE(review): this loop reads addr_tokens[i] without an
	     i < length bound -- presumably the tokenizer guarantees a
	     non-ACCESS_METHOD terminator; verify against
	     omp_parse_expr.  */
	  i += 2;
	  while (addr_tokens[i]->type == ACCESS_METHOD)
	    i++;
	}
      else if (remaining >= 2
	       && addr_tokens[i]->type == STRUCTURE_BASE
	       && addr_tokens[i]->u.structure_base_kind == BASE_ARBITRARY_EXPR
	       && addr_tokens[i + 1]->type == ACCESS_METHOD)
	{
	  switch (addr_tokens[i + 1]->u.access_kind)
	    {
	    case ACCESS_DIRECT:
	    case ACCESS_POINTER:
	      i += 2;
	      while (addr_tokens[i]->type == ACCESS_METHOD)
		i++;
	      break;
	    default:
	      return error_mark_node;
	    }
	}
      else if (remaining >= 2
	       && addr_tokens[i]->type == COMPONENT_SELECTOR
	       && addr_tokens[i + 1]->type == ACCESS_METHOD)
	{
	  c = expand_component_selector (c, addr_tokens, expr, &i, ort);
	  /* We used 'expr', so these must have been the last tokens.  */
	  gcc_assert (i == length);
	  if (c == error_mark_node)
	    return error_mark_node;
	}
      else if (remaining >= 3
	       && addr_tokens[i]->type == COMPONENT_SELECTOR
	       && addr_tokens[i + 1]->type == STRUCTURE_BASE
	       && (addr_tokens[i + 1]->u.structure_base_kind
		   == BASE_COMPONENT_EXPR)
	       && addr_tokens[i + 2]->type == ACCESS_METHOD)
	{
	  i += 3;
	  while (addr_tokens[i]->type == ACCESS_METHOD)
	    i++;
	}
      else
	break;
    }

  /* Success only if every token was consumed.  */
  if (i == length)
    return c;

  return error_mark_node;
}
/* Table of OpenMP directives known to the C/C++ front ends.  Fields are:
   first/second/third keyword, pragma code, directive category, and whether
   the directive takes clauses whose parsing is combined/composite-aware
   (see c_omp_directive in c-common.h -- TODO confirm field meaning against
   that declaration).  */
const struct c_omp_directive c_omp_directives[] = {
  /* Keep this alphabetically sorted by the first word.  Non-null
     second/third if any should precede null ones.
     NOTE: c_omp_categorize_directive relies on this ordering -- it
     early-exits as soon as an entry's first letter exceeds the one
     searched for.  */
  { "allocate", nullptr, nullptr, PRAGMA_OMP_ALLOCATE,
    C_OMP_DIR_DECLARATIVE, false },
  { "assume", nullptr, nullptr, PRAGMA_OMP_ASSUME,
    C_OMP_DIR_INFORMATIONAL, false },
  { "assumes", nullptr, nullptr, PRAGMA_OMP_ASSUMES,
    C_OMP_DIR_INFORMATIONAL, false },
  { "atomic", nullptr, nullptr, PRAGMA_OMP_ATOMIC,
    C_OMP_DIR_CONSTRUCT, false },
  { "barrier", nullptr, nullptr, PRAGMA_OMP_BARRIER,
    C_OMP_DIR_STANDALONE, false },
  { "begin", "assumes", nullptr, PRAGMA_OMP_BEGIN,
    C_OMP_DIR_INFORMATIONAL, false },
  { "begin", "declare", "target", PRAGMA_OMP_BEGIN,
    C_OMP_DIR_DECLARATIVE, false },
  /* { "begin", "declare", "variant", PRAGMA_OMP_BEGIN,
    C_OMP_DIR_DECLARATIVE, false }, */
  /* { "begin", "metadirective", nullptr, PRAGMA_OMP_BEGIN,
    C_OMP_DIR_???, ??? },  */
  { "cancel", nullptr, nullptr, PRAGMA_OMP_CANCEL,
    C_OMP_DIR_STANDALONE, false },
  { "cancellation", "point", nullptr, PRAGMA_OMP_CANCELLATION_POINT,
    C_OMP_DIR_STANDALONE, false },
  { "critical", nullptr, nullptr, PRAGMA_OMP_CRITICAL,
    C_OMP_DIR_CONSTRUCT, false },
  /* { "declare", "mapper", nullptr, PRAGMA_OMP_DECLARE,
    C_OMP_DIR_DECLARATIVE, false },  */
  { "declare", "reduction", nullptr, PRAGMA_OMP_DECLARE,
    C_OMP_DIR_DECLARATIVE, true },
  { "declare", "simd", nullptr, PRAGMA_OMP_DECLARE,
    C_OMP_DIR_DECLARATIVE, true },
  { "declare", "target", nullptr, PRAGMA_OMP_DECLARE,
    C_OMP_DIR_DECLARATIVE, false },
  { "declare", "variant", nullptr, PRAGMA_OMP_DECLARE,
    C_OMP_DIR_DECLARATIVE, false },
  { "depobj", nullptr, nullptr, PRAGMA_OMP_DEPOBJ,
    C_OMP_DIR_STANDALONE, false },
  /* { "dispatch", nullptr, nullptr, PRAGMA_OMP_DISPATCH,
    C_OMP_DIR_CONSTRUCT, false },  */
  { "distribute", nullptr, nullptr, PRAGMA_OMP_DISTRIBUTE,
    C_OMP_DIR_CONSTRUCT, true },
  { "end", "assumes", nullptr, PRAGMA_OMP_END,
    C_OMP_DIR_INFORMATIONAL, false },
  { "end", "declare", "target", PRAGMA_OMP_END,
    C_OMP_DIR_DECLARATIVE, false },
  /* { "end", "declare", "variant", PRAGMA_OMP_END,
    C_OMP_DIR_DECLARATIVE, false }, */
  /* { "end", "metadirective", nullptr, PRAGMA_OMP_END,
    C_OMP_DIR_???, ??? },  */
  /* error with at(execution) is C_OMP_DIR_STANDALONE.  */
  { "error", nullptr, nullptr, PRAGMA_OMP_ERROR,
    C_OMP_DIR_UTILITY, false },
  { "flush", nullptr, nullptr, PRAGMA_OMP_FLUSH,
    C_OMP_DIR_STANDALONE, false },
  { "for", nullptr, nullptr, PRAGMA_OMP_FOR,
    C_OMP_DIR_CONSTRUCT, true },
  /* { "groupprivate", nullptr, nullptr, PRAGMA_OMP_GROUPPRIVATE,
    C_OMP_DIR_DECLARATIVE, false },  */
  /* { "interop", nullptr, nullptr, PRAGMA_OMP_INTEROP,
    C_OMP_DIR_STANDALONE, false },  */
  { "loop", nullptr, nullptr, PRAGMA_OMP_LOOP,
    C_OMP_DIR_CONSTRUCT, true },
  { "masked", nullptr, nullptr, PRAGMA_OMP_MASKED,
    C_OMP_DIR_CONSTRUCT, true },
  { "master", nullptr, nullptr, PRAGMA_OMP_MASTER,
    C_OMP_DIR_CONSTRUCT, true },
  /* { "metadirective", nullptr, nullptr, PRAGMA_OMP_METADIRECTIVE,
    C_OMP_DIR_???, ??? },  */
  { "nothing", nullptr, nullptr, PRAGMA_OMP_NOTHING,
    C_OMP_DIR_UTILITY, false },
  /* ordered with depend clause is C_OMP_DIR_STANDALONE.  */
  { "ordered", nullptr, nullptr, PRAGMA_OMP_ORDERED,
    C_OMP_DIR_CONSTRUCT, true },
  { "parallel", nullptr, nullptr, PRAGMA_OMP_PARALLEL,
    C_OMP_DIR_CONSTRUCT, true },
  { "requires", nullptr, nullptr, PRAGMA_OMP_REQUIRES,
    C_OMP_DIR_INFORMATIONAL, false },
  { "scan", nullptr, nullptr, PRAGMA_OMP_SCAN,
    C_OMP_DIR_CONSTRUCT, true },
  { "scope", nullptr, nullptr, PRAGMA_OMP_SCOPE,
    C_OMP_DIR_CONSTRUCT, false },
  { "section", nullptr, nullptr, PRAGMA_OMP_SECTION,
    C_OMP_DIR_CONSTRUCT, false },
  { "sections", nullptr, nullptr, PRAGMA_OMP_SECTIONS,
    C_OMP_DIR_CONSTRUCT, false },
  { "simd", nullptr, nullptr, PRAGMA_OMP_SIMD,
    C_OMP_DIR_CONSTRUCT, true },
  { "single", nullptr, nullptr, PRAGMA_OMP_SINGLE,
    C_OMP_DIR_CONSTRUCT, false },
  { "target", "data", nullptr, PRAGMA_OMP_TARGET,
    C_OMP_DIR_CONSTRUCT, false },
  { "target", "enter", "data", PRAGMA_OMP_TARGET,
    C_OMP_DIR_STANDALONE, false },
  { "target", "exit", "data", PRAGMA_OMP_TARGET,
    C_OMP_DIR_STANDALONE, false },
  { "target", "update", nullptr, PRAGMA_OMP_TARGET,
    C_OMP_DIR_STANDALONE, false },
  { "target", nullptr, nullptr, PRAGMA_OMP_TARGET,
    C_OMP_DIR_CONSTRUCT, true },
  { "task", nullptr, nullptr, PRAGMA_OMP_TASK,
    C_OMP_DIR_CONSTRUCT, false },
  { "taskgroup", nullptr, nullptr, PRAGMA_OMP_TASKGROUP,
    C_OMP_DIR_CONSTRUCT, false },
  { "taskloop", nullptr, nullptr, PRAGMA_OMP_TASKLOOP,
    C_OMP_DIR_CONSTRUCT, true },
  { "taskwait", nullptr, nullptr, PRAGMA_OMP_TASKWAIT,
    C_OMP_DIR_STANDALONE, false },
  { "taskyield", nullptr, nullptr, PRAGMA_OMP_TASKYIELD,
    C_OMP_DIR_STANDALONE, false },
  /* { "tile", nullptr, nullptr, PRAGMA_OMP_TILE,
    C_OMP_DIR_CONSTRUCT, false },  */
  { "teams", nullptr, nullptr, PRAGMA_OMP_TEAMS,
    C_OMP_DIR_CONSTRUCT, true },
  { "threadprivate", nullptr, nullptr, PRAGMA_OMP_THREADPRIVATE,
    C_OMP_DIR_DECLARATIVE, false }
  /* { "unroll", nullptr, nullptr, PRAGMA_OMP_UNROLL,
    C_OMP_DIR_CONSTRUCT, false },  */
};
/* Find (non-combined/composite) OpenMP directive (if any) which starts
   with FIRST keyword and for multi-word directives has SECOND and
   THIRD keyword after it.  SECOND/THIRD may be NULL.  Returns a pointer
   into c_omp_directives, or NULL if no entry matches.  */

const struct c_omp_directive *
c_omp_categorize_directive (const char *first, const char *second,
			    const char *third)
{
  const size_t n_omp_directives = ARRAY_SIZE (c_omp_directives);
  for (size_t i = 0; i < n_omp_directives; i++)
    {
      /* The table is sorted by first keyword, so compare initial letters
	 to skip cheaply and stop early once past FIRST's letter.  */
      if ((unsigned char) c_omp_directives[i].first[0]
	  < (unsigned char) first[0])
	continue;
      if ((unsigned char) c_omp_directives[i].first[0]
	  > (unsigned char) first[0])
	break;
      if (strcmp (c_omp_directives[i].first, first))
	continue;
      /* A one-word entry matches regardless of SECOND/THIRD.  */
      if (!c_omp_directives[i].second)
	return &c_omp_directives[i];
      if (!second || strcmp (c_omp_directives[i].second, second))
	continue;
      if (!c_omp_directives[i].third)
	return &c_omp_directives[i];
      if (!third || strcmp (c_omp_directives[i].third, third))
	continue;
      return &c_omp_directives[i];
    }
  return NULL;
}