Merge from trunk @217148.
[official-gcc.git] / gcc / expr.h
blob1f21ab047f3f7f6cdd5945e0ea03669929fea8b9
1 /* Definitions for code generation pass of GNU compiler.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #ifndef GCC_EXPR_H
21 #define GCC_EXPR_H
23 /* For inhibit_defer_pop */
24 #include "hashtab.h"
25 #include "hash-set.h"
26 #include "vec.h"
27 #include "machmode.h"
28 #include "tm.h"
29 #include "hard-reg-set.h"
30 #include "input.h"
31 #include "function.h"
32 /* For XEXP, GEN_INT, rtx_code */
33 #include "rtl.h"
34 /* For optimize_size */
35 #include "flags.h"
36 /* For tree_fits_[su]hwi_p, tree_to_[su]hwi, fold_convert, size_binop,
37 ssize_int, TREE_CODE, TYPE_SIZE, int_size_in_bytes, */
38 #include "tree-core.h"
39 /* For GET_MODE_BITSIZE, word_mode */
40 #include "insn-config.h"
42 /* This is the 4th arg to `expand_expr'.
43 EXPAND_STACK_PARM means we are possibly expanding a call param onto
44 the stack.
45 EXPAND_SUM means it is ok to return a PLUS rtx or MULT rtx.
46 EXPAND_INITIALIZER is similar but also record any labels on forced_labels.
47 EXPAND_CONST_ADDRESS means it is ok to return a MEM whose address
48 is a constant that is not a legitimate address.
49 EXPAND_WRITE means we are only going to write to the resulting rtx.
50 EXPAND_MEMORY means we are interested in a memory result, even if
51 the memory is constant and we could have propagated a constant value,
52 or the memory is unaligned on a STRICT_ALIGNMENT target. */
53 enum expand_modifier {EXPAND_NORMAL = 0, EXPAND_STACK_PARM, EXPAND_SUM,
54 EXPAND_CONST_ADDRESS, EXPAND_INITIALIZER, EXPAND_WRITE,
55 EXPAND_MEMORY};
57 /* Prevent the compiler from deferring stack pops. See
58 inhibit_defer_pop for more information. */
59 #define NO_DEFER_POP (inhibit_defer_pop += 1)
61 /* Allow the compiler to defer stack pops. See inhibit_defer_pop for
62 more information. */
63 #define OK_DEFER_POP (inhibit_defer_pop -= 1)
65 /* This structure is used to pass around information about exploded
66 unary, binary and trinary expressions between expand_expr_real_1 and
67 friends. */
68 typedef struct separate_ops
70 enum tree_code code;
71 location_t location;
72 tree type;
73 tree op0, op1, op2;
74 } *sepops;
76 /* Functions from expmed.c: */
78 /* Arguments MODE, RTX: return an rtx for the negation of that value.
79 May emit insns. */
80 extern rtx negate_rtx (machine_mode, rtx);
82 /* Arguments MODE, RTX: return an rtx for the flipping of that value.
83 May emit insns. */
84 extern rtx flip_storage_order (enum machine_mode, rtx);
86 /* Expand a logical AND operation. */
87 extern rtx expand_and (machine_mode, rtx, rtx, rtx);
89 /* Emit a store-flag operation. */
90 extern rtx emit_store_flag (rtx, enum rtx_code, rtx, rtx, machine_mode,
91 int, int);
93 /* Like emit_store_flag, but always succeeds. */
94 extern rtx emit_store_flag_force (rtx, enum rtx_code, rtx, rtx,
95 machine_mode, int, int);
97 /* Choose a minimal N + 1 bit approximation to 1/D that can be used to
98 replace division by D, and put the least significant N bits of the result
99 in *MULTIPLIER_PTR and return the most significant bit. */
100 extern unsigned HOST_WIDE_INT choose_multiplier (unsigned HOST_WIDE_INT, int,
101 int, unsigned HOST_WIDE_INT *,
102 int *, int *);
104 /* Functions from expr.c: */
106 /* This is run during target initialization to set up which modes can be
107 used directly in memory and to initialize the block move optab. */
108 extern void init_expr_target (void);
110 /* This is run at the start of compiling a function. */
111 extern void init_expr (void);
113 /* Emit some rtl insns to move data between rtx's, converting machine modes.
114 Both modes must be floating or both fixed. */
115 extern void convert_move (rtx, rtx, int);
117 /* Convert an rtx to specified machine mode and return the result. */
118 extern rtx convert_to_mode (machine_mode, rtx, int);
120 /* Convert an rtx to MODE from OLDMODE and return the result. */
121 extern rtx convert_modes (machine_mode, machine_mode, rtx, int);
123 /* Emit code to move a block Y to a block X. */
125 enum block_op_methods
127 BLOCK_OP_NORMAL,
128 BLOCK_OP_NO_LIBCALL,
129 BLOCK_OP_CALL_PARM,
130 /* Like BLOCK_OP_NORMAL, but the libcall can be tail call optimized. */
131 BLOCK_OP_TAILCALL
134 extern GTY(()) tree block_clear_fn;
135 extern void init_block_move_fn (const char *);
136 extern void init_block_clear_fn (const char *);
138 extern rtx emit_block_move (rtx, rtx, rtx, enum block_op_methods);
139 extern rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
140 extern rtx emit_block_move_hints (rtx, rtx, rtx, enum block_op_methods,
141 unsigned int, HOST_WIDE_INT,
142 unsigned HOST_WIDE_INT,
143 unsigned HOST_WIDE_INT,
144 unsigned HOST_WIDE_INT);
145 extern bool emit_storent_insn (rtx to, rtx from);
147 /* Copy all or part of a value X into registers starting at REGNO.
148 The number of registers to be filled is NREGS. */
149 extern void move_block_to_reg (int, rtx, int, machine_mode);
151 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
152 The number of registers to be filled is NREGS. */
153 extern void move_block_from_reg (int, rtx, int);
155 /* Generate a non-consecutive group of registers represented by a PARALLEL. */
156 extern rtx gen_group_rtx (rtx);
158 /* Load a BLKmode value into non-consecutive registers represented by a
159 PARALLEL. */
160 extern void emit_group_load (rtx, rtx, tree, int);
162 /* Similarly, but load into new temporaries. */
163 extern rtx emit_group_load_into_temps (rtx, rtx, tree, int);
165 /* Move a non-consecutive group of registers represented by a PARALLEL into
166 a non-consecutive group of registers represented by a PARALLEL. */
167 extern void emit_group_move (rtx, rtx);
169 /* Move a group of registers represented by a PARALLEL into pseudos. */
170 extern rtx emit_group_move_into_temps (rtx);
172 /* Store a BLKmode value from non-consecutive registers represented by a
173 PARALLEL. */
174 extern void emit_group_store (rtx, rtx, tree, int);
176 extern rtx maybe_emit_group_store (rtx, tree);
178 /* Copy BLKmode object from a set of registers. */
179 extern void copy_blkmode_from_reg (rtx, rtx, tree);
181 /* Mark REG as holding a parameter for the next CALL_INSN.
182 Mode is TYPE_MODE of the non-promoted parameter, or VOIDmode. */
183 extern void use_reg_mode (rtx *, rtx, machine_mode);
184 extern void clobber_reg_mode (rtx *, rtx, machine_mode);
186 extern rtx copy_blkmode_to_reg (machine_mode, tree);
188 /* Mark REG as holding a parameter for the next CALL_INSN. */
189 static inline void
190 use_reg (rtx *fusage, rtx reg)
192 use_reg_mode (fusage, reg, VOIDmode);
195 /* Mark REG as clobbered by the call with FUSAGE as CALL_INSN_FUNCTION_USAGE. */
196 static inline void
197 clobber_reg (rtx *fusage, rtx reg)
199 clobber_reg_mode (fusage, reg, VOIDmode);
202 /* Mark NREGS consecutive regs, starting at REGNO, as holding parameters
203 for the next CALL_INSN. */
204 extern void use_regs (rtx *, int, int);
206 /* Mark a PARALLEL as holding a parameter for the next CALL_INSN. */
207 extern void use_group_regs (rtx *, rtx);
209 /* Write zeros through the storage of OBJECT.
210 If OBJECT has BLKmode, SIZE is its length in bytes. */
211 extern rtx clear_storage (rtx, rtx, enum block_op_methods);
212 extern rtx clear_storage_hints (rtx, rtx, enum block_op_methods,
213 unsigned int, HOST_WIDE_INT,
214 unsigned HOST_WIDE_INT,
215 unsigned HOST_WIDE_INT,
216 unsigned HOST_WIDE_INT);
217 /* The same, but always output an library call. */
218 rtx set_storage_via_libcall (rtx, rtx, rtx, bool);
220 /* Expand a setmem pattern; return true if successful. */
221 extern bool set_storage_via_setmem (rtx, rtx, rtx, unsigned int,
222 unsigned int, HOST_WIDE_INT,
223 unsigned HOST_WIDE_INT,
224 unsigned HOST_WIDE_INT,
225 unsigned HOST_WIDE_INT);
227 extern unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
228 unsigned int,
229 unsigned int);
231 /* Return nonzero if it is desirable to store LEN bytes generated by
232 CONSTFUN with several move instructions by store_by_pieces
233 function. CONSTFUNDATA is a pointer which will be passed as argument
234 in every CONSTFUN call.
235 ALIGN is maximum alignment we can assume.
236 MEMSETP is true if this is a real memset/bzero, not a copy
237 of a const string. */
238 extern int can_store_by_pieces (unsigned HOST_WIDE_INT,
239 rtx (*) (void *, HOST_WIDE_INT,
240 machine_mode),
241 void *, unsigned int, bool);
243 /* Generate several move instructions to store LEN bytes generated by
244 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
245 pointer which will be passed as argument in every CONSTFUN call.
246 ALIGN is maximum alignment we can assume.
247 MEMSETP is true if this is a real memset/bzero, not a copy.
248 Returns TO + LEN. */
249 extern rtx store_by_pieces (rtx, unsigned HOST_WIDE_INT,
250 rtx (*) (void *, HOST_WIDE_INT, machine_mode),
251 void *, unsigned int, bool, int);
253 /* Emit insns to set X from Y. */
254 extern rtx_insn *emit_move_insn (rtx, rtx);
255 extern rtx gen_move_insn (rtx, rtx);
257 /* Emit insns to set X from Y, with no frills. */
258 extern rtx_insn *emit_move_insn_1 (rtx, rtx);
260 extern rtx_insn *emit_move_complex_push (machine_mode, rtx, rtx);
261 extern rtx_insn *emit_move_complex_parts (rtx, rtx);
262 extern rtx emit_move_resolve_push (machine_mode, rtx);
264 /* Push a block of length SIZE (perhaps variable)
265 and return an rtx to address the beginning of the block. */
266 extern rtx push_block (rtx, int, int);
268 /* Generate code to push something onto the stack, given its mode and type. */
269 extern void emit_push_insn (rtx, machine_mode, tree, rtx, unsigned int,
270 int, rtx, int, rtx, rtx, int, rtx);
272 /* Expand an assignment that stores the value of FROM into TO. */
273 extern void expand_assignment (tree, tree, bool);
275 /* Generate code for computing expression EXP,
276 and storing the value into TARGET.
277 If SUGGEST_REG is nonzero, copy the value through a register
278 and return that register, if that is possible. */
279 extern rtx store_expr_with_bounds (tree, rtx, int, bool, bool, tree);
280 extern rtx store_expr (tree, rtx, int, bool, bool);
282 /* Given an rtx that may include add and multiply operations,
283 generate them as insns and return a pseudo-reg containing the value.
284 Useful after calling expand_expr with 1 as sum_ok. */
285 extern rtx force_operand (rtx, rtx);
287 /* Work horses for expand_expr. */
288 extern rtx expand_expr_real (tree, rtx, machine_mode,
289 enum expand_modifier, rtx *, bool);
290 extern rtx expand_expr_real_1 (tree, rtx, machine_mode,
291 enum expand_modifier, rtx *, bool);
292 extern rtx expand_expr_real_2 (sepops, rtx, machine_mode,
293 enum expand_modifier);
295 /* Generate code for computing expression EXP.
296 An rtx for the computed value is returned. The value is never null.
297 In the case of a void EXP, const0_rtx is returned. */
298 static inline rtx
299 expand_expr (tree exp, rtx target, machine_mode mode,
300 enum expand_modifier modifier)
302 return expand_expr_real (exp, target, mode, modifier, NULL, false);
305 static inline rtx
306 expand_normal (tree exp)
308 return expand_expr_real (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL, NULL, false);
311 /* At the start of a function, record that we have no previously-pushed
312 arguments waiting to be popped. */
313 extern void init_pending_stack_adjust (void);
315 /* Discard any pending stack adjustment. */
316 extern void discard_pending_stack_adjust (void);
318 /* When exiting from function, if safe, clear out any pending stack adjust
319 so the adjustment won't get done. */
320 extern void clear_pending_stack_adjust (void);
322 /* Pop any previously-pushed arguments that have not been popped yet. */
323 extern void do_pending_stack_adjust (void);
325 /* Struct for saving/restoring of pending_stack_adjust/stack_pointer_delta
326 values. */
328 struct saved_pending_stack_adjust
330 /* Saved value of pending_stack_adjust. */
331 int x_pending_stack_adjust;
333 /* Saved value of stack_pointer_delta. */
334 int x_stack_pointer_delta;
337 /* Remember pending_stack_adjust/stack_pointer_delta.
338 To be used around code that may call do_pending_stack_adjust (),
339 but the generated code could be discarded e.g. using delete_insns_since. */
341 extern void save_pending_stack_adjust (saved_pending_stack_adjust *);
343 /* Restore the saved pending_stack_adjust/stack_pointer_delta. */
345 extern void restore_pending_stack_adjust (saved_pending_stack_adjust *);
347 /* Return the tree node and offset if a given argument corresponds to
348 a string constant. */
349 extern tree string_constant (tree, tree *);
351 /* Generate code to evaluate EXP and jump to LABEL if the value is zero. */
352 extern void jumpifnot (tree, rtx, int);
353 extern void jumpifnot_1 (enum tree_code, tree, tree, rtx, int);
355 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
356 extern void jumpif (tree, rtx, int);
357 extern void jumpif_1 (enum tree_code, tree, tree, rtx, int);
359 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
360 the result is zero, or IF_TRUE_LABEL if the result is one. */
361 extern void do_jump (tree, rtx, rtx, int);
362 extern void do_jump_1 (enum tree_code, tree, tree, rtx, rtx, int);
364 extern void do_compare_rtx_and_jump (rtx, rtx, enum rtx_code, int,
365 machine_mode, rtx, rtx, rtx, int);
367 /* Two different ways of generating switch statements. */
368 extern int try_casesi (tree, tree, tree, tree, rtx, rtx, rtx, int);
369 extern int try_tablejump (tree, tree, tree, tree, rtx, rtx, int);
371 /* Functions from alias.c */
372 #include "alias.h"
375 /* rtl.h and tree.h were included. */
376 /* Return an rtx for the size in bytes of the value of an expr. */
377 extern rtx expr_size (tree);
379 /* Return a wide integer for the size in bytes of the value of EXP, or -1
380 if the size can vary or is larger than an integer. */
381 extern HOST_WIDE_INT int_expr_size (tree);
383 /* Return an rtx that refers to the value returned by a function
384 in its original home. This becomes invalid if any more code is emitted. */
385 extern rtx hard_function_value (const_tree, const_tree, const_tree, int);
387 extern rtx prepare_call_address (tree, rtx, rtx, rtx *, int, int);
389 extern bool shift_return_value (machine_mode, bool, rtx);
391 extern rtx expand_call (tree, rtx, int);
393 extern void fixup_tail_calls (void);
395 #ifdef TREE_CODE
396 extern rtx expand_variable_shift (enum tree_code, machine_mode,
397 rtx, tree, rtx, int);
398 extern rtx expand_shift (enum tree_code, machine_mode, rtx, int, rtx,
399 int);
400 extern rtx expand_divmod (int, enum tree_code, machine_mode, rtx, rtx,
401 rtx, int);
402 #endif
404 /* Return the CODE_LABEL rtx for a LABEL_DECL, creating it if necessary. */
405 extern rtx label_rtx (tree);
407 /* As label_rtx, but additionally the label is placed on the forced label
408 list of its containing function (i.e. it is treated as reachable even
409 if how is not obvious). */
410 extern rtx force_label_rtx (tree);
412 /* Return an rtx like arg but sans any constant terms.
413 Returns the original rtx if it has no constant terms.
414 The constant terms are added and stored via a second arg. */
415 extern rtx eliminate_constant_term (rtx, rtx *);
417 /* Convert arg to a valid memory address for specified machine mode that points
418 to a specific named address space, by emitting insns to perform arithmetic
419 if necessary. */
420 extern rtx memory_address_addr_space (machine_mode, rtx, addr_space_t);
422 /* Like memory_address_addr_space, except assume the memory address points to
423 the generic named address space. */
424 #define memory_address(MODE,RTX) \
425 memory_address_addr_space ((MODE), (RTX), ADDR_SPACE_GENERIC)
427 /* Return a memory reference like MEMREF, but with its mode changed
428 to MODE and its address changed to ADDR.
429 (VOIDmode means don't change the mode.
430 NULL for ADDR means don't change the address.) */
431 extern rtx change_address (rtx, machine_mode, rtx);
433 /* Return a memory reference like MEMREF, but with its mode changed
434 to MODE and its address offset by OFFSET bytes. */
435 #define adjust_address(MEMREF, MODE, OFFSET) \
436 adjust_address_1 (MEMREF, MODE, OFFSET, 1, 1, 0, 0)
438 /* Likewise, but the reference is not required to be valid. */
439 #define adjust_address_nv(MEMREF, MODE, OFFSET) \
440 adjust_address_1 (MEMREF, MODE, OFFSET, 0, 1, 0, 0)
442 /* Return a memory reference like MEMREF, but with its mode changed
443 to MODE and its address offset by OFFSET bytes. Assume that it's
444 for a bitfield and conservatively drop the underlying object if we
445 cannot be sure to stay within its bounds. */
446 #define adjust_bitfield_address(MEMREF, MODE, OFFSET) \
447 adjust_address_1 (MEMREF, MODE, OFFSET, 1, 1, 1, 0)
449 /* As for adjust_bitfield_address, but specify that the width of
450 BLKmode accesses is SIZE bytes. */
451 #define adjust_bitfield_address_size(MEMREF, MODE, OFFSET, SIZE) \
452 adjust_address_1 (MEMREF, MODE, OFFSET, 1, 1, 1, SIZE)
454 /* Likewise, but the reference is not required to be valid. */
455 #define adjust_bitfield_address_nv(MEMREF, MODE, OFFSET) \
456 adjust_address_1 (MEMREF, MODE, OFFSET, 0, 1, 1, 0)
458 /* Return a memory reference like MEMREF, but with its mode changed
459 to MODE and its address changed to ADDR, which is assumed to be
460 increased by OFFSET bytes from MEMREF. */
461 #define adjust_automodify_address(MEMREF, MODE, ADDR, OFFSET) \
462 adjust_automodify_address_1 (MEMREF, MODE, ADDR, OFFSET, 1)
464 /* Likewise, but the reference is not required to be valid. */
465 #define adjust_automodify_address_nv(MEMREF, MODE, ADDR, OFFSET) \
466 adjust_automodify_address_1 (MEMREF, MODE, ADDR, OFFSET, 0)
468 extern rtx adjust_address_1 (rtx, machine_mode, HOST_WIDE_INT, int, int,
469 int, HOST_WIDE_INT);
470 extern rtx adjust_automodify_address_1 (rtx, machine_mode, rtx,
471 HOST_WIDE_INT, int);
473 /* Return a memory reference like MEMREF, but whose address is changed by
474 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
475 known to be in OFFSET (possibly 1). */
476 extern rtx offset_address (rtx, rtx, unsigned HOST_WIDE_INT);
478 /* Definitions from emit-rtl.c */
479 #include "emit-rtl.h"
481 /* Return a memory reference like MEMREF, but with its mode widened to
482 MODE and adjusted by OFFSET. */
483 extern rtx widen_memory_access (rtx, machine_mode, HOST_WIDE_INT);
485 /* Return a memory reference like MEMREF, but which is known to have a
486 valid address. */
487 extern rtx validize_mem (rtx);
489 extern rtx use_anchored_address (rtx);
491 /* Given REF, a MEM, and T, either the type of X or the expression
492 corresponding to REF, set the memory attributes. OBJECTP is nonzero
493 if we are making a new object of this type. */
494 extern void set_mem_attributes (rtx, tree, int);
496 /* Similar, except that BITPOS has not yet been applied to REF, so if
497 we alter MEM_OFFSET according to T then we should subtract BITPOS
498 expecting that it'll be added back in later. */
499 extern void set_mem_attributes_minus_bitpos (rtx, tree, int, HOST_WIDE_INT);
501 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
502 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
503 -1 if not known. */
504 extern int get_mem_align_offset (rtx, unsigned int);
506 /* Assemble the static constant template for function entry trampolines. */
507 extern rtx assemble_trampoline_template (void);
509 /* Copy given rtx to a new temp reg and return that. */
510 extern rtx copy_to_reg (rtx);
512 /* Like copy_to_reg but always make the reg Pmode. */
513 extern rtx copy_addr_to_reg (rtx);
515 /* Like copy_to_reg but always make the reg the specified mode MODE. */
516 extern rtx copy_to_mode_reg (machine_mode, rtx);
518 /* Copy given rtx to given temp reg and return that. */
519 extern rtx copy_to_suggested_reg (rtx, rtx, machine_mode);
521 /* Copy a value to a register if it isn't already a register.
522 Args are mode (in case value is a constant) and the value. */
523 extern rtx force_reg (machine_mode, rtx);
525 /* Return given rtx, copied into a new temp reg if it was in memory. */
526 extern rtx force_not_mem (rtx);
528 /* Return mode and signedness to use when an argument or result in the
529 given mode is promoted. */
530 extern machine_mode promote_function_mode (const_tree, machine_mode, int *,
531 const_tree, int);
533 /* Return mode and signedness to use when an object in the given mode
534 is promoted. */
535 extern machine_mode promote_mode (const_tree, machine_mode, int *);
537 /* Return mode and signedness to use when object is promoted. */
538 machine_mode promote_decl_mode (const_tree, int *);
540 /* Remove some bytes from the stack. An rtx says how many. */
541 extern void adjust_stack (rtx);
543 /* Add some bytes to the stack. An rtx says how many. */
544 extern void anti_adjust_stack (rtx);
546 /* Add some bytes to the stack while probing it. An rtx says how many. */
547 extern void anti_adjust_stack_and_probe (rtx, bool);
549 /* This enum is used for the following two functions. */
550 enum save_level {SAVE_BLOCK, SAVE_FUNCTION, SAVE_NONLOCAL};
552 /* Save the stack pointer at the specified level. */
553 extern void emit_stack_save (enum save_level, rtx *);
555 /* Restore the stack pointer from a save area of the specified level. */
556 extern void emit_stack_restore (enum save_level, rtx);
558 /* Invoke emit_stack_save for the nonlocal_goto_save_area. */
559 extern void update_nonlocal_goto_save_area (void);
561 /* Allocate some space on the stack dynamically and return its address. */
562 extern rtx allocate_dynamic_stack_space (rtx, unsigned, unsigned, bool);
564 /* Emit one stack probe at ADDRESS, an address within the stack. */
565 extern void emit_stack_probe (rtx);
567 /* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
568 FIRST is a constant and size is a Pmode RTX. These are offsets from
569 the current stack pointer. STACK_GROWS_DOWNWARD says whether to add
570 or subtract them from the stack pointer. */
571 extern void probe_stack_range (HOST_WIDE_INT, rtx);
573 /* Return an rtx that refers to the value returned by a library call
574 in its original home. This becomes invalid if any more code is emitted. */
575 extern rtx hard_libcall_value (machine_mode, rtx);
577 extern void store_bit_field (rtx, unsigned HOST_WIDE_INT,
578 unsigned HOST_WIDE_INT,
579 unsigned HOST_WIDE_INT,
580 unsigned HOST_WIDE_INT,
581 machine_mode, rtx, bool);
582 extern rtx extract_bit_field (rtx, unsigned HOST_WIDE_INT,
583 unsigned HOST_WIDE_INT, int, rtx,
584 machine_mode, machine_mode, bool);
585 extern rtx extract_low_bits (machine_mode, machine_mode, rtx);
586 extern rtx expand_mult (machine_mode, rtx, rtx, rtx, int);
587 extern rtx expand_mult_highpart_adjust (machine_mode, rtx, rtx, rtx, rtx, int);
589 extern rtx assemble_static_space (unsigned HOST_WIDE_INT);
590 extern int safe_from_p (const_rtx, tree, int);
591 extern bool split_comparison (enum rtx_code, machine_mode,
592 enum rtx_code *, enum rtx_code *);
594 /* Get the personality libfunc for a function decl. */
595 rtx get_personality_function (tree);
598 /* In stmt.c */
600 /* Expand a GIMPLE_SWITCH statement. */
601 extern void expand_case (gimple);
603 /* Like expand_case but special-case for SJLJ exception dispatching. */
604 extern void expand_sjlj_dispatch_table (rtx, vec<tree> );
606 /* Determine whether the LEN bytes can be moved by using several move
607 instructions. Return nonzero if a call to move_by_pieces should
608 succeed. */
609 extern int can_move_by_pieces (unsigned HOST_WIDE_INT, unsigned int);
611 extern unsigned HOST_WIDE_INT highest_pow2_factor (const_tree);
612 bool array_at_struct_end_p (tree);
614 /* Return a tree of sizetype representing the size, in bytes, of the element
615 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
616 extern tree array_ref_element_size (tree);
618 extern bool categorize_ctor_elements (const_tree, HOST_WIDE_INT *,
619 HOST_WIDE_INT *, bool *);
621 /* Return a tree representing the offset, in bytes, of the field referenced
622 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
623 extern tree component_ref_field_offset (tree);
625 #endif /* GCC_EXPR_H */