[NDS32] Implement n15 pipeline.
[official-gcc.git] / gcc / config / nds32 / nds32-utils.c
/* Auxiliary functions for pipeline descriptions pattern of Andes
   NDS32 cpu for GNU compiler
   Copyright (C) 2012-2018 Free Software Foundation, Inc.
   Contributed by Andes Technology Corporation.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 3, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

/* ------------------------------------------------------------------------ */

#define IN_TARGET_CODE 1

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "optabs.h"		/* For GEN_FCN.  */
#include "recog.h"
#include "tm-constrs.h"
#include "insn-attr.h"

namespace nds32 {

/* Get the rtx in the PATTERN field of an insn.  If INSN is not an insn,
   the function doesn't change anything and returns it directly.  */
rtx
extract_pattern_from_insn (rtx insn)
{
  if (INSN_P (insn))
    return PATTERN (insn);

  return insn;
}

/* Get the number of elements in a parallel rtx.  */
size_t
parallel_elements (rtx parallel_rtx)
{
  parallel_rtx = extract_pattern_from_insn (parallel_rtx);
  gcc_assert (GET_CODE (parallel_rtx) == PARALLEL);

  return XVECLEN (parallel_rtx, 0);
}

/* Extract an rtx from a parallel rtx with index NTH.  If NTH is negative,
   count from the end, so that NTH == -1 returns the last element.  */
rtx
parallel_element (rtx parallel_rtx, int nth)
{
  parallel_rtx = extract_pattern_from_insn (parallel_rtx);
  gcc_assert (GET_CODE (parallel_rtx) == PARALLEL);

  int len = parallel_elements (parallel_rtx);

  if (nth >= 0)
    {
      if (nth >= len)
        return NULL_RTX;

      return XVECEXP (parallel_rtx, 0, nth);
    }
  else
    {
      if (len + nth < 0)
        return NULL_RTX;

      return XVECEXP (parallel_rtx, 0, len + nth);
    }
}
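
/* For illustration: with a three-element PARALLEL, parallel_element (x, -1)
   returns element 2, while parallel_element (x, 3) returns NULL_RTX.  */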

/* Functions to determine whether INSN is single-word, double-word
   or partial-word load/store insn.  */

bool
load_single_p (rtx_insn *insn)
{
  if (get_attr_type (insn) != TYPE_LOAD)
    return false;

  if (INSN_CODE (insn) == CODE_FOR_move_di ||
      INSN_CODE (insn) == CODE_FOR_move_df)
    return false;

  return true;
}

bool
store_single_p (rtx_insn *insn)
{
  if (get_attr_type (insn) != TYPE_STORE)
    return false;

  if (INSN_CODE (insn) == CODE_FOR_move_di ||
      INSN_CODE (insn) == CODE_FOR_move_df)
    return false;

  return true;
}

bool
load_double_p (rtx_insn *insn)
{
  if (get_attr_type (insn) != TYPE_LOAD)
    return false;

  if (INSN_CODE (insn) != CODE_FOR_move_di &&
      INSN_CODE (insn) != CODE_FOR_move_df)
    return false;

  return true;
}

bool
store_double_p (rtx_insn *insn)
{
  if (get_attr_type (insn) != TYPE_STORE)
    return false;

  if (INSN_CODE (insn) != CODE_FOR_move_di &&
      INSN_CODE (insn) != CODE_FOR_move_df)
    return false;

  return true;
}

bool
store_offset_reg_p (rtx_insn *insn)
{
  if (get_attr_type (insn) != TYPE_STORE)
    return false;

  rtx offset_rtx = extract_offset_rtx (insn);

  if (offset_rtx == NULL_RTX)
    return false;

  if (REG_P (offset_rtx))
    return true;

  return false;
}
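
/* Note on the predicates above: loads and stores emitted by the move_di and
   move_df patterns transfer a doubleword, so they are classified as
   double-word accesses by load_double_p / store_double_p; every other
   TYPE_LOAD / TYPE_STORE insn counts as a single-word access.  */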

/* Determine if INSN is a post update insn.  */
bool
post_update_insn_p (rtx_insn *insn)
{
  if (find_post_update_rtx (insn) == -1)
    return false;
  else
    return true;
}

/* Check if the address of MEM_RTX consists of a base register and an
   immediate offset.  */
bool
immed_offset_p (rtx mem_rtx)
{
  gcc_assert (MEM_P (mem_rtx));

  rtx addr_rtx = XEXP (mem_rtx, 0);

  /* (mem (reg)) is equivalent to (mem (plus (reg) (const_int 0)))  */
  if (REG_P (addr_rtx))
    return true;

  /* (mem (plus (reg) (const_int)))  */
  if (GET_CODE (addr_rtx) == PLUS
      && GET_CODE (XEXP (addr_rtx, 1)) == CONST_INT)
    return true;

  return false;
}
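
/* For illustration: (mem (plus (reg) (const_int 8))) passes the check above,
   whereas a register-indexed address such as (mem (plus (reg) (reg)))
   does not.  */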

/* Find the post update rtx in INSN.  If INSN is a load/store multiple insn,
   the function returns the vector index of its parallel part.  If INSN is a
   single load/store insn, the function returns 0.  If INSN is not a post-
   update insn, the function returns -1.  */
int
find_post_update_rtx (rtx_insn *insn)
{
  rtx mem_rtx;
  int i, len;

  switch (get_attr_type (insn))
    {
    case TYPE_LOAD_MULTIPLE:
    case TYPE_STORE_MULTIPLE:
      /* Find a pattern in a parallel rtx:
         (set (reg) (plus (reg) (const_int)))  */
      len = parallel_elements (insn);
      for (i = 0; i < len; ++i)
        {
          rtx curr_insn = parallel_element (insn, i);

          if (GET_CODE (curr_insn) == SET
              && REG_P (SET_DEST (curr_insn))
              && GET_CODE (SET_SRC (curr_insn)) == PLUS)
            return i;
        }
      return -1;

    case TYPE_LOAD:
    case TYPE_FLOAD:
    case TYPE_STORE:
    case TYPE_FSTORE:
      mem_rtx = extract_mem_rtx (insn);
      /* (mem (post_inc (reg)))  */
      switch (GET_CODE (XEXP (mem_rtx, 0)))
        {
        case POST_INC:
        case POST_DEC:
        case POST_MODIFY:
          return 0;

        default:
          return -1;
        }

    default:
      gcc_unreachable ();
    }
}
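
/* For illustration: a post-update load/store multiple contains an element
   that updates the base register, e.g. (set (reg) (plus (reg) (const_int 16)))
   alongside the memory accesses; the index of that element is the value
   returned above.  */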

/* Extract the MEM rtx from a load/store insn.  */
rtx
extract_mem_rtx (rtx_insn *insn)
{
  rtx body = PATTERN (insn);

  switch (get_attr_type (insn))
    {
    case TYPE_LOAD:
    case TYPE_FLOAD:
      if (MEM_P (SET_SRC (body)))
        return SET_SRC (body);

      /* unaligned address: (unspec [(mem)])  */
      if (GET_CODE (SET_SRC (body)) == UNSPEC)
        {
          gcc_assert (MEM_P (XVECEXP (SET_SRC (body), 0, 0)));
          return XVECEXP (SET_SRC (body), 0, 0);
        }

      /* (sign_extend (mem))  */
      gcc_assert (MEM_P (XEXP (SET_SRC (body), 0)));
      return XEXP (SET_SRC (body), 0);

    case TYPE_STORE:
    case TYPE_FSTORE:
      if (MEM_P (SET_DEST (body)))
        return SET_DEST (body);

      /* unaligned address: (unspec [(mem)])  */
      if (GET_CODE (SET_DEST (body)) == UNSPEC)
        {
          gcc_assert (MEM_P (XVECEXP (SET_DEST (body), 0, 0)));
          return XVECEXP (SET_DEST (body), 0, 0);
        }

      /* (sign_extend (mem))  */
      gcc_assert (MEM_P (XEXP (SET_DEST (body), 0)));
      return XEXP (SET_DEST (body), 0);

    default:
      gcc_unreachable ();
    }
}

/* Extract the base register from load/store insns.  The function returns
   NULL_RTX if the address does not contain any register.  */
rtx
extract_base_reg (rtx_insn *insn)
{
  int post_update_rtx_index;
  rtx mem_rtx;
  rtx plus_rtx;

  /* Find the MEM rtx.  If we can find an insn updating the base register,
     the base register will be returned directly.  */
  switch (get_attr_type (insn))
    {
    case TYPE_LOAD_MULTIPLE:
      post_update_rtx_index = find_post_update_rtx (insn);

      if (post_update_rtx_index != -1)
        return SET_DEST (parallel_element (insn, post_update_rtx_index));

      mem_rtx = SET_SRC (parallel_element (insn, 0));
      break;

    case TYPE_STORE_MULTIPLE:
      post_update_rtx_index = find_post_update_rtx (insn);

      if (post_update_rtx_index != -1)
        return SET_DEST (parallel_element (insn, post_update_rtx_index));

      mem_rtx = SET_DEST (parallel_element (insn, 0));
      break;

    case TYPE_LOAD:
    case TYPE_FLOAD:
    case TYPE_STORE:
    case TYPE_FSTORE:
      mem_rtx = extract_mem_rtx (insn);
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (mem_rtx));

  /* (mem (reg))  */
  if (REG_P (XEXP (mem_rtx, 0)))
    return XEXP (mem_rtx, 0);

  /* (mem (lo_sum (reg) (symbol_ref)))  */
  if (GET_CODE (XEXP (mem_rtx, 0)) == LO_SUM)
    return XEXP (XEXP (mem_rtx, 0), 0);

  plus_rtx = XEXP (mem_rtx, 0);

  if (GET_CODE (plus_rtx) == SYMBOL_REF
      || GET_CODE (plus_rtx) == CONST)
    return NULL_RTX;

  /* (mem (plus (reg) (const_int))) or
     (mem (plus (mult (reg) (const_int 4)) (reg))) or
     (mem (post_inc (reg))) or
     (mem (post_dec (reg))) or
     (mem (post_modify (reg) (plus (reg) (reg))))  */
  gcc_assert (GET_CODE (plus_rtx) == PLUS
              || GET_CODE (plus_rtx) == POST_INC
              || GET_CODE (plus_rtx) == POST_DEC
              || GET_CODE (plus_rtx) == POST_MODIFY);

  if (REG_P (XEXP (plus_rtx, 0)))
    return XEXP (plus_rtx, 0);

  gcc_assert (REG_P (XEXP (plus_rtx, 1)));
  return XEXP (plus_rtx, 1);
}

/* Extract the offset rtx from load/store insns.  The function returns
   NULL_RTX if the offset is absent.  */
rtx
extract_offset_rtx (rtx_insn *insn)
{
  rtx mem_rtx;
  rtx plus_rtx;
  rtx offset_rtx;

  /* Find the MEM rtx.  Load/store multiple insns don't have an offset
     field, so we can return NULL_RTX here.  */
  switch (get_attr_type (insn))
    {
    case TYPE_LOAD_MULTIPLE:
    case TYPE_STORE_MULTIPLE:
      return NULL_RTX;

    case TYPE_LOAD:
    case TYPE_FLOAD:
    case TYPE_STORE:
    case TYPE_FSTORE:
      mem_rtx = extract_mem_rtx (insn);
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (mem_rtx));

  /* (mem (reg))  */
  if (REG_P (XEXP (mem_rtx, 0)))
    return NULL_RTX;

  plus_rtx = XEXP (mem_rtx, 0);

  switch (GET_CODE (plus_rtx))
    {
    case SYMBOL_REF:
    case CONST:
    case POST_INC:
    case POST_DEC:
      return NULL_RTX;

    case PLUS:
      /* (mem (plus (reg) (const_int))) or
         (mem (plus (mult (reg) (const_int 4)) (reg)))  */
      if (REG_P (XEXP (plus_rtx, 0)))
        offset_rtx = XEXP (plus_rtx, 1);
      else
        {
          gcc_assert (REG_P (XEXP (plus_rtx, 1)));
          offset_rtx = XEXP (plus_rtx, 0);
        }

      if (ARITHMETIC_P (offset_rtx))
        {
          gcc_assert (GET_CODE (offset_rtx) == MULT);
          gcc_assert (REG_P (XEXP (offset_rtx, 0)));
          offset_rtx = XEXP (offset_rtx, 0);
        }
      break;

    case LO_SUM:
      /* (mem (lo_sum (reg) (symbol_ref)))  */
      offset_rtx = XEXP (plus_rtx, 1);
      break;

    case POST_MODIFY:
      /* (mem (post_modify (reg) (plus (reg) (reg / const_int))))  */
      gcc_assert (REG_P (XEXP (plus_rtx, 0)));
      plus_rtx = XEXP (plus_rtx, 1);

      gcc_assert (GET_CODE (plus_rtx) == PLUS);
      offset_rtx = XEXP (plus_rtx, 0);
      break;

    default:
      gcc_unreachable ();
    }

  return offset_rtx;
}

/* Extract the register of the shift operand from an ALU_SHIFT rtx.  */
rtx
extract_shift_reg (rtx alu_shift_rtx)
{
  alu_shift_rtx = extract_pattern_from_insn (alu_shift_rtx);

  rtx alu_rtx = SET_SRC (alu_shift_rtx);
  rtx shift_rtx;

  /* Various forms of ALU_SHIFT can be made by the combiner.
     See the difference between add_slli and sub_slli in nds32.md.  */
  if (REG_P (XEXP (alu_rtx, 0)))
    shift_rtx = XEXP (alu_rtx, 1);
  else
    shift_rtx = XEXP (alu_rtx, 0);

  return XEXP (shift_rtx, 0);
}
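
/* In other words, whichever operand of the ALU rtx is not a plain REG is
   taken as the shift sub-expression, and the register being shifted (its
   first operand) is what gets returned.  */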

/* Check if INSN is a movd44 insn.  */
bool
movd44_insn_p (rtx_insn *insn)
{
  if (get_attr_type (insn) == TYPE_ALU
      && (INSN_CODE (insn) == CODE_FOR_move_di
          || INSN_CODE (insn) == CODE_FOR_move_df))
    {
      rtx body = PATTERN (insn);
      gcc_assert (GET_CODE (body) == SET);

      rtx src = SET_SRC (body);
      rtx dest = SET_DEST (body);

      if ((REG_P (src) || GET_CODE (src) == SUBREG)
          && (REG_P (dest) || GET_CODE (dest) == SUBREG))
        return true;

      return false;
    }

  return false;
}

/* Extract the second result (odd reg) of a movd44 insn.  */
rtx
extract_movd44_odd_reg (rtx_insn *insn)
{
  gcc_assert (movd44_insn_p (insn));

  rtx def_reg = SET_DEST (PATTERN (insn));
  machine_mode mode;

  gcc_assert (REG_P (def_reg) || GET_CODE (def_reg) == SUBREG);
  switch (GET_MODE (def_reg))
    {
    case E_DImode:
      mode = SImode;
      break;

    case E_DFmode:
      mode = SFmode;
      break;

    default:
      gcc_unreachable ();
    }

  return gen_highpart (mode, def_reg);
}
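
/* The movd44 destination is a doubleword register pair, so the high part
   selected above corresponds to the pair's second (odd-numbered) register,
   i.e. the insn's second result.  */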

/* Extract the rtx representing non-accumulation operands of a MAC insn.  */
rtx
extract_mac_non_acc_rtx (rtx_insn *insn)
{
  rtx exp = SET_SRC (PATTERN (insn));

  switch (get_attr_type (insn))
    {
    case TYPE_MAC:
    case TYPE_DMAC:
      if (REG_P (XEXP (exp, 0)))
        return XEXP (exp, 1);
      else
        return XEXP (exp, 0);

    default:
      gcc_unreachable ();
    }
}

/* Check if the DIV insn needs two write ports.  */
bool
divmod_p (rtx_insn *insn)
{
  gcc_assert (get_attr_type (insn) == TYPE_DIV);

  if (INSN_CODE (insn) == CODE_FOR_divmodsi4
      || INSN_CODE (insn) == CODE_FOR_udivmodsi4)
    return true;

  return false;
}
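
/* The divmodsi4 and udivmodsi4 patterns produce both a quotient and a
   remainder, hence the two register-file write ports; a plain division
   writes only one result.  */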

/* Extract the rtx representing the branch target to help recognize
   data hazards.  */
rtx
extract_branch_target_rtx (rtx_insn *insn)
{
  gcc_assert (CALL_P (insn) || JUMP_P (insn));

  rtx body = PATTERN (insn);

  if (GET_CODE (body) == SET)
    {
      /* RTXs in IF_THEN_ELSE are branch conditions.  */
      if (GET_CODE (SET_SRC (body)) == IF_THEN_ELSE)
        return NULL_RTX;

      return SET_SRC (body);
    }

  if (GET_CODE (body) == CALL)
    return XEXP (body, 0);

  if (GET_CODE (body) == PARALLEL)
    {
      rtx first_rtx = parallel_element (body, 0);

      if (GET_CODE (first_rtx) == SET)
        return SET_SRC (first_rtx);

      if (GET_CODE (first_rtx) == CALL)
        return XEXP (first_rtx, 0);
    }

  /* Handle special cases of bltzal, bgezal and jralnez.  */
  if (GET_CODE (body) == COND_EXEC)
    {
      rtx addr_rtx = XEXP (body, 1);

      if (GET_CODE (addr_rtx) == SET)
        return SET_SRC (addr_rtx);

      if (GET_CODE (addr_rtx) == PARALLEL)
        {
          rtx first_rtx = parallel_element (addr_rtx, 0);

          if (GET_CODE (first_rtx) == SET)
            {
              rtx call_rtx = SET_SRC (first_rtx);
              gcc_assert (GET_CODE (call_rtx) == CALL);

              return XEXP (call_rtx, 0);
            }

          if (GET_CODE (first_rtx) == CALL)
            return XEXP (first_rtx, 0);
        }
    }

  gcc_unreachable ();
}

/* Extract the rtx representing the branch condition to help recognize
   data hazards.  */
rtx
extract_branch_condition_rtx (rtx_insn *insn)
{
  gcc_assert (CALL_P (insn) || JUMP_P (insn));

  rtx body = PATTERN (insn);

  if (GET_CODE (body) == SET)
    {
      rtx if_then_else_rtx = SET_SRC (body);

      if (GET_CODE (if_then_else_rtx) == IF_THEN_ELSE)
        return XEXP (if_then_else_rtx, 0);

      return NULL_RTX;
    }

  if (GET_CODE (body) == COND_EXEC)
    return XEXP (body, 0);

  return NULL_RTX;
}

} // namespace nds32