gcc/expr.h

/* Definitions for code generation pass of GNU compiler.
   Copyright (C) 1987-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef GCC_EXPR_H
#define GCC_EXPR_H

/* This is the 4th arg to `expand_expr'.
   EXPAND_STACK_PARM means we are possibly expanding a call param onto
   the stack.
   EXPAND_SUM means it is ok to return a PLUS rtx or MULT rtx.
   EXPAND_INITIALIZER is similar but also records any labels on forced_labels.
   EXPAND_CONST_ADDRESS means it is ok to return a MEM whose address
   is a constant that is not a legitimate address.
   EXPAND_WRITE means we are only going to write to the resulting rtx.
   EXPAND_MEMORY means we are interested in a memory result, even if
   the memory is constant and we could have propagated a constant value,
   or the memory is unaligned on a STRICT_ALIGNMENT target.  */
enum expand_modifier {EXPAND_NORMAL = 0, EXPAND_STACK_PARM, EXPAND_SUM,
                      EXPAND_CONST_ADDRESS, EXPAND_INITIALIZER, EXPAND_WRITE,
                      EXPAND_MEMORY};

/* Prevent the compiler from deferring stack pops.  See
   inhibit_defer_pop for more information.  */
#define NO_DEFER_POP (inhibit_defer_pop += 1)

/* Allow the compiler to defer stack pops.  See inhibit_defer_pop for
   more information.  */
#define OK_DEFER_POP (inhibit_defer_pop -= 1)
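
/* Illustrative usage sketch (not part of the original header): the two
   macros bracket a region in which stack pops must not be deferred, e.g.

     NO_DEFER_POP;
     ... emit insns that depend on the current stack pointer ...
     OK_DEFER_POP;

   Every NO_DEFER_POP must be matched by an OK_DEFER_POP, since the macros
   merely increment and decrement the inhibit_defer_pop counter.  */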

/* This structure is used to pass around information about exploded
   unary, binary and trinary expressions between expand_expr_real_1 and
   friends.  */
typedef struct separate_ops
{
  enum tree_code code;
  location_t location;
  tree type;
  tree op0, op1, op2;
} *sepops;
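
/* Illustrative sketch (not part of the original header): a caller that has
   already split a binary operation into its pieces can expand it through
   expand_expr_real_2 (declared further down in this header) roughly like
   so; OP0, OP1, TYPE and LOC are placeholders for values the caller
   already holds:

     struct separate_ops ops;
     ops.code = PLUS_EXPR;
     ops.location = loc;
     ops.type = type;
     ops.op0 = op0;
     ops.op1 = op1;
     ops.op2 = NULL_TREE;
     rtx res = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (type),
                                   EXPAND_NORMAL);  */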

/* This is run during target initialization to set up which modes can be
   used directly in memory and to initialize the block move optab.  */
extern void init_expr_target (void);

/* This is run at the start of compiling a function.  */
extern void init_expr (void);

/* Emit some rtl insns to move data between rtx's, converting machine modes.
   Both modes must be floating or both fixed.  */
extern void convert_move (rtx, rtx, int);

/* Convert an rtx to specified machine mode and return the result.  */
extern rtx convert_to_mode (machine_mode, rtx, int);

/* Convert an rtx to MODE from OLDMODE and return the result.  */
extern rtx convert_modes (machine_mode mode, machine_mode oldmode,
                          rtx x, int unsignedp);
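
/* Illustrative sketch (not part of the original header): sign-extending a
   SImode value NARROW into DImode would look roughly like

     rtx wide = convert_modes (DImode, SImode, narrow, 0);

   with a nonzero final argument requesting an unsigned (zero-extending)
   conversion instead.  */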

/* Variant of convert_modes for ABI parameter passing/return.  */
extern rtx convert_float_to_wider_int (machine_mode mode, machine_mode fmode,
                                       rtx x);

/* Variant of convert_modes for ABI parameter passing/return.  */
extern rtx convert_wider_int_to_float (machine_mode mode, machine_mode imode,
                                       rtx x);

/* Expand a call to memcpy or memmove or memcmp, and return the result.  */
extern rtx emit_block_op_via_libcall (enum built_in_function, rtx, rtx, rtx,
                                      bool);

inline rtx
emit_block_copy_via_libcall (rtx dst, rtx src, rtx size, bool tailcall = false)
{
  return emit_block_op_via_libcall (BUILT_IN_MEMCPY, dst, src, size, tailcall);
}

inline rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall = false)
{
  return emit_block_op_via_libcall (BUILT_IN_MEMMOVE, dst, src, size, tailcall);
}

inline rtx
emit_block_comp_via_libcall (rtx dst, rtx src, rtx size, bool tailcall = false)
{
  return emit_block_op_via_libcall (BUILT_IN_MEMCMP, dst, src, size, tailcall);
}
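
/* Illustrative sketch (not part of the original header): forcing a copy
   through the memcpy libcall, assuming DST and SRC are MEMs and SIZE an
   rtx byte count the caller already built:

     rtx ret = emit_block_copy_via_libcall (dst, src, size,
                                            /*tailcall=*/true);

   The return value is the libcall's result rtx.  */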

/* Emit code to move a block Y to a block X.  */
enum block_op_methods
{
  BLOCK_OP_NORMAL,
  BLOCK_OP_NO_LIBCALL,
  BLOCK_OP_CALL_PARM,
  /* Like BLOCK_OP_NORMAL, but the libcall can be tail call optimized.  */
  BLOCK_OP_TAILCALL,
  /* Like BLOCK_OP_NO_LIBCALL, but instead of emitting a libcall return
     pc_rtx to indicate nothing has been emitted and let the caller handle
     it.  */
  BLOCK_OP_NO_LIBCALL_RET
};

typedef rtx (*by_pieces_constfn) (void *, void *, HOST_WIDE_INT,
                                  fixed_size_mode);

/* The second pointer passed to by_pieces_constfn.  */
struct by_pieces_prev
{
  rtx data;
  fixed_size_mode mode;
};
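
/* Illustrative sketch (not part of the original header): a minimal
   by_pieces_constfn callback.  The first pointer is the caller's
   CONSTFUNDATA, the second a by_pieces_prev describing the previously
   generated constant, followed by the byte offset and the piece mode.
   A callback that materializes zeros for every piece could be written as

     static rtx
     zero_constfn (void *, void *, HOST_WIDE_INT, fixed_size_mode mode)
     {
       return CONST0_RTX (mode);
     }

   and passed to can_store_by_pieces/store_by_pieces declared below.  */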

extern rtx emit_block_move (rtx, rtx, rtx, enum block_op_methods);
extern rtx emit_block_move_hints (rtx, rtx, rtx, enum block_op_methods,
                                  unsigned int, HOST_WIDE_INT,
                                  unsigned HOST_WIDE_INT,
                                  unsigned HOST_WIDE_INT,
                                  unsigned HOST_WIDE_INT,
                                  bool bail_out_libcall = false,
                                  bool *is_move_done = NULL,
                                  bool might_overlap = false);
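
/* Illustrative sketch (not part of the original header): copying a BLKmode
   object of type TYPE, assuming DST_MEM and SRC_MEM are MEMs the caller
   already set up:

     rtx nbytes = GEN_INT (int_size_in_bytes (type));
     emit_block_move (dst_mem, src_mem, nbytes, BLOCK_OP_NORMAL);

   BLOCK_OP_CALL_PARM would be used instead when the copy feeds an outgoing
   call argument.  */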

extern rtx emit_block_cmp_hints (rtx, rtx, rtx, tree, rtx, bool,
                                 by_pieces_constfn, void *);
extern bool emit_storent_insn (rtx to, rtx from);

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */
extern void move_block_to_reg (int, rtx, int, machine_mode);

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */
extern void move_block_from_reg (int, rtx, int);

/* Generate a non-consecutive group of registers represented by a PARALLEL.  */
extern rtx gen_group_rtx (rtx);

/* Load a BLKmode value into non-consecutive registers represented by a
   PARALLEL.  */
extern void emit_group_load (rtx, rtx, tree, poly_int64);

/* Similarly, but load into new temporaries.  */
extern rtx emit_group_load_into_temps (rtx, rtx, tree, poly_int64);

/* Move a non-consecutive group of registers represented by a PARALLEL into
   a non-consecutive group of registers represented by a PARALLEL.  */
extern void emit_group_move (rtx, rtx);

/* Move a group of registers represented by a PARALLEL into pseudos.  */
extern rtx emit_group_move_into_temps (rtx);

/* Store a BLKmode value from non-consecutive registers represented by a
   PARALLEL.  */
extern void emit_group_store (rtx, rtx, tree, poly_int64);

extern rtx maybe_emit_group_store (rtx, tree);

/* Mark REG as holding a parameter for the next CALL_INSN.
   Mode is TYPE_MODE of the non-promoted parameter, or VOIDmode.  */
extern void use_reg_mode (rtx *, rtx, machine_mode);
extern void clobber_reg_mode (rtx *, rtx, machine_mode);

extern rtx copy_blkmode_to_reg (machine_mode, tree);

/* Mark REG as holding a parameter for the next CALL_INSN.  */
inline void
use_reg (rtx *fusage, rtx reg)
{
  use_reg_mode (fusage, reg, VOIDmode);
}

/* Mark REG as clobbered by the call with FUSAGE as CALL_INSN_FUNCTION_USAGE.  */
inline void
clobber_reg (rtx *fusage, rtx reg)
{
  clobber_reg_mode (fusage, reg, VOIDmode);
}
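
/* Illustrative sketch (not part of the original header): building up the
   CALL_INSN_FUNCTION_USAGE list for a call, assuming ARG_REG carries a
   parameter and SCRATCH_REG is clobbered by the call sequence:

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, arg_reg);
     clobber_reg (&call_fusage, scratch_reg);

   CALL_FUSAGE is then attached to the emitted CALL_INSN by the caller.  */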

/* Mark NREGS consecutive regs, starting at REGNO, as holding parameters
   for the next CALL_INSN.  */
extern void use_regs (rtx *, int, int);

/* Mark a PARALLEL as holding a parameter for the next CALL_INSN.  */
extern void use_group_regs (rtx *, rtx);

#ifdef GCC_INSN_CODES_H
extern rtx expand_cmpstrn_or_cmpmem (insn_code, rtx, rtx, rtx, tree, rtx,
                                     HOST_WIDE_INT);
#endif

/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */
extern rtx clear_storage (rtx, rtx, enum block_op_methods);
extern rtx clear_storage_hints (rtx, rtx, enum block_op_methods,
                                unsigned int, HOST_WIDE_INT,
                                unsigned HOST_WIDE_INT,
                                unsigned HOST_WIDE_INT,
                                unsigned HOST_WIDE_INT,
                                unsigned);
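
/* Illustrative sketch (not part of the original header): zeroing a BLKmode
   object, assuming DST_MEM is a MEM the caller already built and TYPE its
   tree type:

     rtx nbytes = GEN_INT (int_size_in_bytes (type));
     clear_storage (dst_mem, nbytes, BLOCK_OP_NORMAL);  */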
/* The same, but always output a library call.  */
extern rtx set_storage_via_libcall (rtx, rtx, rtx, bool = false);

/* Expand a setmem pattern; return true if successful.  */
extern bool set_storage_via_setmem (rtx, rtx, rtx, unsigned int,
                                    unsigned int, HOST_WIDE_INT,
                                    unsigned HOST_WIDE_INT,
                                    unsigned HOST_WIDE_INT,
                                    unsigned HOST_WIDE_INT);

/* Return true if it is desirable to store LEN bytes generated by
   CONSTFUN with several move instructions by store_by_pieces
   function.  CONSTFUNDATA is a pointer which will be passed as argument
   in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   MEMSETP is true if this is a real memset/bzero, not a copy
   of a const string.  */
extern bool can_store_by_pieces (unsigned HOST_WIDE_INT,
                                 by_pieces_constfn,
                                 void *, unsigned int, bool);

/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   MEMSETP is true if this is a real memset/bzero, not a copy.
   Returns TO + LEN.  */
extern rtx store_by_pieces (rtx, unsigned HOST_WIDE_INT, by_pieces_constfn,
                            void *, unsigned int, bool, memop_ret);
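
/* Illustrative sketch (not part of the original header): the usual pairing
   of the two functions, reusing the hypothetical zero_constfn callback
   sketched earlier and assuming LEN and ALIGN are known to the caller
   (RETURN_BEGIN is a memop_ret value):

     if (can_store_by_pieces (len, zero_constfn, NULL, align, true))
       store_by_pieces (to, len, zero_constfn, NULL, align, true,
                        RETURN_BEGIN);
     else
       clear_storage (to, GEN_INT (len), BLOCK_OP_NORMAL);  */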

/* If can_store_by_pieces passes for worst-case values near MAX_LEN, call
   store_by_pieces within conditionals so as to handle variable LEN efficiently,
   storing VAL, if non-NULL_RTX, or valc instead.  */
extern bool try_store_by_multiple_pieces (rtx to, rtx len,
                                          unsigned int ctz_len,
                                          unsigned HOST_WIDE_INT min_len,
                                          unsigned HOST_WIDE_INT max_len,
                                          rtx val, char valc,
                                          unsigned int align);

/* Emit insns to set X from Y.  */
extern rtx_insn *emit_move_insn (rtx, rtx);
extern rtx_insn *gen_move_insn (rtx, rtx);
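
/* Illustrative sketch (not part of the original header): copying a value
   into a fresh pseudo of the same mode:

     rtx tmp = gen_reg_rtx (GET_MODE (src));
     emit_move_insn (tmp, src);

   emit_move_insn emits the move into the instruction stream, while
   gen_move_insn only generates the insn sequence without emitting it.  */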

/* Emit insns to set X from Y, with no frills.  */
extern rtx_insn *emit_move_insn_1 (rtx, rtx);

extern rtx_insn *emit_move_complex_push (machine_mode, rtx, rtx);
extern rtx_insn *emit_move_complex_parts (rtx, rtx);
extern rtx read_complex_part (rtx, bool);
extern void write_complex_part (rtx, rtx, bool, bool);
extern rtx emit_move_resolve_push (machine_mode, rtx);

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.  */
extern rtx push_block (rtx, poly_int64, int);

/* Generate code to push something onto the stack, given its mode and type.  */
extern bool emit_push_insn (rtx, machine_mode, tree, rtx, unsigned int,
                            int, rtx, poly_int64, rtx, rtx, int, rtx, bool);

/* Extract the accessible bit-range from a COMPONENT_REF.  */
extern void get_bit_range (poly_uint64 *, poly_uint64 *, tree,
                           poly_int64 *, tree *);

/* Expand an assignment that stores the value of FROM into TO.  */
extern void expand_assignment (tree, tree, bool);

/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   If SUGGEST_REG is nonzero, copy the value through a register
   and return that register, if that is possible.  */
extern rtx store_expr (tree, rtx, int, bool, bool);

/* Given an rtx that may include add and multiply operations,
   generate them as insns and return a pseudo-reg containing the value.
   Useful after calling expand_expr with 1 as sum_ok.  */
extern rtx force_operand (rtx, rtx);

/* Work horses for expand_expr.  */
extern rtx expand_expr_real (tree, rtx, machine_mode,
                             enum expand_modifier, rtx *, bool);
extern rtx expand_expr_real_1 (tree, rtx, machine_mode,
                               enum expand_modifier, rtx *, bool);
extern rtx expand_expr_real_2 (sepops, rtx, machine_mode,
                               enum expand_modifier);

/* Generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.  */
inline rtx
expand_expr (tree exp, rtx target, machine_mode mode,
             enum expand_modifier modifier)
{
  return expand_expr_real (exp, target, mode, modifier, NULL, false);
}

inline rtx
expand_normal (tree exp)
{
  return expand_expr_real (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL, NULL, false);
}
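
/* Illustrative sketch (not part of the original header): the common entry
   points in practice.  Expanding an operand with no particular target:

     rtx op0 = expand_normal (treeop0);

   Expanding an address-like expression while allowing a bare PLUS to be
   returned (see the EXPAND_SUM modifier above):

     rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);

   TREEOP0 and EXP stand for trees the caller already has.  */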

/* Return STRING_CST and set offset, size and decl, if the first
   argument corresponds to a string constant.  */
extern tree string_constant (tree, tree *, tree *, tree *);
/* Similar to string_constant, return a STRING_CST corresponding
   to the value representation of the first argument if it's
   a constant.  */
extern tree byte_representation (tree, tree *, tree *, tree *);

extern enum tree_code maybe_optimize_mod_cmp (enum tree_code, tree *, tree *);
extern void maybe_optimize_sub_cmp_0 (enum tree_code, tree *, tree *);

/* Two different ways of generating switch statements.  */
extern bool try_casesi (tree, tree, tree, tree, rtx, rtx, rtx,
                        profile_probability);
extern bool try_tablejump (tree, tree, tree, tree, rtx, rtx,
                           profile_probability);

extern bool safe_from_p (const_rtx, tree, int);

/* Get the personality libfunc for a function decl.  */
rtx get_personality_function (tree);

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */
extern bool can_move_by_pieces (unsigned HOST_WIDE_INT, unsigned int);

extern unsigned HOST_WIDE_INT highest_pow2_factor (const_tree);

extern bool categorize_ctor_elements (const_tree, HOST_WIDE_INT *,
                                      HOST_WIDE_INT *, HOST_WIDE_INT *,
                                      bool *);
extern bool immediate_const_ctor_p (const_tree, unsigned int words = 1);
extern void store_constructor (tree, rtx, int, poly_int64, bool);
extern HOST_WIDE_INT int_expr_size (const_tree exp);

extern void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);

/* rtl.h and tree.h were included.  */
/* Return an rtx for the size in bytes of the value of an expr.  */
extern rtx expr_size (tree);

extern bool mem_ref_refers_to_non_mem_p (tree);
extern bool non_mem_decl_p (tree);

#endif /* GCC_EXPR_H */