/*
 * MIPS32 emulation for qemu: main translation routines.
 *
 * Copyright (c) 2004-2005 Jocelyn Mayer
 * Copyright (c) 2006 Marius Groeger (FPU operations)
 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA
 */
#include "cpu.h"
#include "exec-all.h"
#include "disas.h"
#include "tcg-op.h"
#include "qemu-common.h"
#include "helper.h"

//#define MIPS_DEBUG_DISAS
//#define MIPS_DEBUG_SIGN_EXTENSIONS
//#define MIPS_SINGLE_STEP
/* MIPS major opcodes */
#define MASK_OP_MAJOR(op)  (op & (0x3F << 26))

enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP1      = (0x11 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_J        = (0x02 << 26),
    OPC_JAL      = (0x03 << 26),
    OPC_BEQ      = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),  /* MIPS 16 only */
    /* Load and stores */
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* Reserved major opcode */
    OPC_MAJOR3B_RESERVED = (0x3B << 26),
};
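
/*
 * Decode example, for illustration only: the word 0x2442ffff encodes
 * "addiu v0, v0, -1". Bits 31..26 are 0x09, so MASK_OP_MAJOR() yields
 * OPC_ADDIU; rs (bits 25..21) and rt (bits 20..16) are both 2 (v0) and the
 * immediate field is 0xffff.
 */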
/* MIPS special opcodes */
#define MASK_SPECIAL(op)   MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    /* Shifts */
    OPC_SLL      = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0   */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3   */
    OPC_SRL      = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_SRA      = 0x03 | OPC_SPECIAL,
    OPC_SLLV     = 0x04 | OPC_SPECIAL,
    OPC_SRLV     = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_SRAV     = 0x07 | OPC_SPECIAL,
    OPC_DSLLV    = 0x14 | OPC_SPECIAL,
    OPC_DSRLV    = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DSRAV    = 0x17 | OPC_SPECIAL,
    OPC_DSLL     = 0x38 | OPC_SPECIAL,
    OPC_DSRL     = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DSRA     = 0x3B | OPC_SPECIAL,
    OPC_DSLL32   = 0x3C | OPC_SPECIAL,
    OPC_DSRL32   = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DSRA32   = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT     = 0x18 | OPC_SPECIAL,
    OPC_MULTU    = 0x19 | OPC_SPECIAL,
    OPC_DIV      = 0x1A | OPC_SPECIAL,
    OPC_DIVU     = 0x1B | OPC_SPECIAL,
    OPC_DMULT    = 0x1C | OPC_SPECIAL,
    OPC_DMULTU   = 0x1D | OPC_SPECIAL,
    OPC_DDIV     = 0x1E | OPC_SPECIAL,
    OPC_DDIVU    = 0x1F | OPC_SPECIAL,
    /* 2 registers arithmetic / logic */
    OPC_ADD      = 0x20 | OPC_SPECIAL,
    OPC_ADDU     = 0x21 | OPC_SPECIAL,
    OPC_SUB      = 0x22 | OPC_SPECIAL,
    OPC_SUBU     = 0x23 | OPC_SPECIAL,
    OPC_AND      = 0x24 | OPC_SPECIAL,
    OPC_OR       = 0x25 | OPC_SPECIAL,
    OPC_XOR      = 0x26 | OPC_SPECIAL,
    OPC_NOR      = 0x27 | OPC_SPECIAL,
    OPC_SLT      = 0x2A | OPC_SPECIAL,
    OPC_SLTU     = 0x2B | OPC_SPECIAL,
    OPC_DADD     = 0x2C | OPC_SPECIAL,
    OPC_DADDU    = 0x2D | OPC_SPECIAL,
    OPC_DSUB     = 0x2E | OPC_SPECIAL,
    OPC_DSUBU    = 0x2F | OPC_SPECIAL,
    /* Jumps */
    OPC_JR       = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR     = 0x09 | OPC_SPECIAL, /* Also JALR.HB */
    /* Traps */
    OPC_TGE      = 0x30 | OPC_SPECIAL,
    OPC_TGEU     = 0x31 | OPC_SPECIAL,
    OPC_TLT      = 0x32 | OPC_SPECIAL,
    OPC_TLTU     = 0x33 | OPC_SPECIAL,
    OPC_TEQ      = 0x34 | OPC_SPECIAL,
    OPC_TNE      = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI     = 0x10 | OPC_SPECIAL,
    OPC_MTHI     = 0x11 | OPC_SPECIAL,
    OPC_MFLO     = 0x12 | OPC_SPECIAL,
    OPC_MTLO     = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ     = 0x0A | OPC_SPECIAL,
    OPC_MOVN     = 0x0B | OPC_SPECIAL,

    OPC_MOVCI    = 0x01 | OPC_SPECIAL,

    /* Special */
    OPC_PMON     = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL  = 0x0C | OPC_SPECIAL,
    OPC_BREAK    = 0x0D | OPC_SPECIAL,
    OPC_SPIM     = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC     = 0x0F | OPC_SPECIAL,

    OPC_SPECIAL15_RESERVED = 0x15 | OPC_SPECIAL,
    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL35_RESERVED = 0x35 | OPC_SPECIAL,
    OPC_SPECIAL37_RESERVED = 0x37 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
};
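
/*
 * Instructions whose major opcode is OPC_SPECIAL are further decoded by the
 * function field in bits 5..0, which MASK_SPECIAL() folds into the value.
 * For example the all-zero word 0x00000000 is SLL r0, r0, 0, the canonical
 * NOP encoding noted above.
 */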
/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op)   MASK_SPECIAL(op) | (op & (0x1F << 6))

enum {
    OPC_VR54XX_MULS    = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU   = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC    = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU   = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC    = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU   = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI   = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU  = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI  = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI  = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI  = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
};
/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op)    MASK_OP_MAJOR(op) | (op & (0x1F << 16))

enum {
    OPC_BLTZ     = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL    = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ     = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL    = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL   = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALL  = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL   = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALL  = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI     = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU    = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI     = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU    = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI     = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI     = (0x0E << 16) | OPC_REGIMM,
    OPC_SYNCI    = (0x1F << 16) | OPC_REGIMM,
};
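
/*
 * For REGIMM the operation is selected by the rt field (bits 20..16), so
 * e.g. BLTZ and BGEZ share major opcode 0x01 and differ only in rt.
 */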
/* Special2 opcodes */
#define MASK_SPECIAL2(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    /* Multiply & xxx operations */
    OPC_MADD     = 0x00 | OPC_SPECIAL2,
    OPC_MADDU    = 0x01 | OPC_SPECIAL2,
    OPC_MUL      = 0x02 | OPC_SPECIAL2,
    OPC_MSUB     = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU    = 0x05 | OPC_SPECIAL2,

    OPC_CLZ      = 0x20 | OPC_SPECIAL2,
    OPC_CLO      = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ     = 0x24 | OPC_SPECIAL2,
    OPC_DCLO     = 0x25 | OPC_SPECIAL2,

    OPC_SDBBP    = 0x3F | OPC_SPECIAL2,
};
/* Special3 opcodes */
#define MASK_SPECIAL3(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_EXT      = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM    = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU    = 0x02 | OPC_SPECIAL3,
    OPC_DEXT     = 0x03 | OPC_SPECIAL3,
    OPC_INS      = 0x04 | OPC_SPECIAL3,
    OPC_DINSM    = 0x05 | OPC_SPECIAL3,
    OPC_DINSU    = 0x06 | OPC_SPECIAL3,
    OPC_DINS     = 0x07 | OPC_SPECIAL3,
    OPC_FORK     = 0x08 | OPC_SPECIAL3,
    OPC_YIELD    = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL    = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL   = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR    = 0x3B | OPC_SPECIAL3,
};
#define MASK_BSHFL(op)     MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_WSBH     = (0x02 << 6) | OPC_BSHFL,
    OPC_SEB      = (0x10 << 6) | OPC_BSHFL,
    OPC_SEH      = (0x18 << 6) | OPC_BSHFL,
};
#define MASK_DBSHFL(op)    MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_DSBH     = (0x02 << 6) | OPC_DBSHFL,
    OPC_DSHD     = (0x05 << 6) | OPC_DBSHFL,
};
/* Coprocessor 0 (rs field) */
#define MASK_CP0(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC0     = (0x00 << 21) | OPC_CP0,
    OPC_DMFC0    = (0x01 << 21) | OPC_CP0,
    OPC_MTC0     = (0x04 << 21) | OPC_CP0,
    OPC_DMTC0    = (0x05 << 21) | OPC_CP0,
    OPC_MFTR     = (0x08 << 21) | OPC_CP0,
    OPC_RDPGPR   = (0x0A << 21) | OPC_CP0,
    OPC_MFMC0    = (0x0B << 21) | OPC_CP0,
    OPC_MTTR     = (0x0C << 21) | OPC_CP0,
    OPC_WRPGPR   = (0x0E << 21) | OPC_CP0,
    OPC_C0       = (0x10 << 21) | OPC_CP0,
    OPC_C0_FIRST = (0x10 << 21) | OPC_CP0,
    OPC_C0_LAST  = (0x1F << 21) | OPC_CP0,
};
#define MASK_MFMC0(op)     MASK_CP0(op) | (op & 0xFFFF)

enum {
    OPC_DMT      = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_EMT      = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_DVPE     = 0x01 | (0 << 5) | OPC_MFMC0,
    OPC_EVPE     = 0x01 | (1 << 5) | OPC_MFMC0,
    OPC_DI       = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_EI       = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
};
/* Coprocessor 0 (with rs == C0) */
#define MASK_C0(op)        MASK_CP0(op) | (op & 0x3F)

enum {
    OPC_TLBR     = 0x01 | OPC_C0,
    OPC_TLBWI    = 0x02 | OPC_C0,
    OPC_TLBWR    = 0x06 | OPC_C0,
    OPC_TLBP     = 0x08 | OPC_C0,
    OPC_RFE      = 0x10 | OPC_C0,
    OPC_ERET     = 0x18 | OPC_C0,
    OPC_DERET    = 0x1F | OPC_C0,
    OPC_WAIT     = 0x20 | OPC_C0,
};
/* Coprocessor 1 (rs field) */
#define MASK_CP1(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC1     = (0x00 << 21) | OPC_CP1,
    OPC_DMFC1    = (0x01 << 21) | OPC_CP1,
    OPC_CFC1     = (0x02 << 21) | OPC_CP1,
    OPC_MFHC1    = (0x03 << 21) | OPC_CP1,
    OPC_MTC1     = (0x04 << 21) | OPC_CP1,
    OPC_DMTC1    = (0x05 << 21) | OPC_CP1,
    OPC_CTC1     = (0x06 << 21) | OPC_CP1,
    OPC_MTHC1    = (0x07 << 21) | OPC_CP1,
    OPC_BC1      = (0x08 << 21) | OPC_CP1, /* bc */
    OPC_BC1ANY2  = (0x09 << 21) | OPC_CP1,
    OPC_BC1ANY4  = (0x0A << 21) | OPC_CP1,
    OPC_S_FMT    = (0x10 << 21) | OPC_CP1, /* 16: fmt=single fp */
    OPC_D_FMT    = (0x11 << 21) | OPC_CP1, /* 17: fmt=double fp */
    OPC_E_FMT    = (0x12 << 21) | OPC_CP1, /* 18: fmt=extended fp */
    OPC_Q_FMT    = (0x13 << 21) | OPC_CP1, /* 19: fmt=quad fp */
    OPC_W_FMT    = (0x14 << 21) | OPC_CP1, /* 20: fmt=32bit fixed */
    OPC_L_FMT    = (0x15 << 21) | OPC_CP1, /* 21: fmt=64bit fixed */
    OPC_PS_FMT   = (0x16 << 21) | OPC_CP1, /* 22: fmt=paired single fp */
};
#define MASK_CP1_FUNC(op)  MASK_CP1(op) | (op & 0x3F)
#define MASK_BC1(op)       MASK_CP1(op) | (op & (0x3 << 16))

enum {
    OPC_BC1F     = (0x00 << 16) | OPC_BC1,
    OPC_BC1T     = (0x01 << 16) | OPC_BC1,
    OPC_BC1FL    = (0x02 << 16) | OPC_BC1,
    OPC_BC1TL    = (0x03 << 16) | OPC_BC1,
};

enum {
    OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
    OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
};

enum {
    OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
    OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
};
#define MASK_CP2(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC2     = (0x00 << 21) | OPC_CP2,
    OPC_DMFC2    = (0x01 << 21) | OPC_CP2,
    OPC_CFC2     = (0x02 << 21) | OPC_CP2,
    OPC_MFHC2    = (0x03 << 21) | OPC_CP2,
    OPC_MTC2     = (0x04 << 21) | OPC_CP2,
    OPC_DMTC2    = (0x05 << 21) | OPC_CP2,
    OPC_CTC2     = (0x06 << 21) | OPC_CP2,
    OPC_MTHC2    = (0x07 << 21) | OPC_CP2,
    OPC_BC2      = (0x08 << 21) | OPC_CP2,
};
#define MASK_CP3(op)       MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_LWXC1    = 0x00 | OPC_CP3,
    OPC_LDXC1    = 0x01 | OPC_CP3,
    OPC_LUXC1    = 0x05 | OPC_CP3,
    OPC_SWXC1    = 0x08 | OPC_CP3,
    OPC_SDXC1    = 0x09 | OPC_CP3,
    OPC_SUXC1    = 0x0D | OPC_CP3,
    OPC_PREFX    = 0x0F | OPC_CP3,
    OPC_ALNV_PS  = 0x1E | OPC_CP3,
    OPC_MADD_S   = 0x20 | OPC_CP3,
    OPC_MADD_D   = 0x21 | OPC_CP3,
    OPC_MADD_PS  = 0x26 | OPC_CP3,
    OPC_MSUB_S   = 0x28 | OPC_CP3,
    OPC_MSUB_D   = 0x29 | OPC_CP3,
    OPC_MSUB_PS  = 0x2E | OPC_CP3,
    OPC_NMADD_S  = 0x30 | OPC_CP3,
    OPC_NMADD_D  = 0x31 | OPC_CP3,
    OPC_NMADD_PS = 0x36 | OPC_CP3,
    OPC_NMSUB_S  = 0x38 | OPC_CP3,
    OPC_NMSUB_D  = 0x39 | OPC_CP3,
    OPC_NMSUB_PS = 0x3E | OPC_CP3,
};
/* global register indices */
static TCGv_ptr cpu_env;
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC], cpu_ACX[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget;
static TCGv_i32 bcond;
static TCGv_i32 fpu_fpr32[32], fpu_fpr32h[32];
static TCGv_i64 fpu_fpr64[32];
static TCGv_i32 fpu_fcr0, fpu_fcr31;

#include "gen-icount.h"
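
/*
 * Note: the TCG values above are globals bound to fields of the CPU state
 * structure (PC, GPRs, HI/LO/ACX accumulators, FPU registers and control
 * words). They are registered against cpu_env by the translator
 * initialization code elsewhere in this file (not shown in this excerpt),
 * so moves through them read and write CPUState directly.
 */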
#define gen_helper_0i(name, arg) do {                             \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(helper_tmp);                                \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1i(name, arg1, arg2) do {                      \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(arg1, helper_tmp);                          \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_2i(name, arg1, arg2, arg3) do {                \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(arg1, arg2, helper_tmp);                    \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_3i(name, arg1, arg2, arg3, arg4) do {          \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(arg1, arg2, arg3, helper_tmp);              \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)
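
/*
 * These wrappers pass a compile-time constant to a helper: helper calls only
 * accept TCG values, so the constant is materialized in a temporary i32,
 * handed to the helper and freed again. For example,
 * gen_helper_0i(raise_exception, excp) expands (roughly) to:
 *
 *     TCGv_i32 helper_tmp = tcg_const_i32(excp);
 *     gen_helper_raise_exception(helper_tmp);
 *     tcg_temp_free_i32(helper_tmp);
 */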
typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc, saved_pc;
    uint32_t opcode;
    /* Routine used to access memory */
    int mem_idx;
    uint32_t hflags, saved_hflags;
    int bstate;
    target_ulong btarget;
} DisasContext;

enum {
    BS_NONE   = 0, /* We go out of the TB without reaching a branch or an
                    * exception condition */
    BS_STOP   = 1, /* We want to stop translation for any reason */
    BS_BRANCH = 2, /* We reached a branch condition */
    BS_EXCP   = 3, /* We reached an exception condition */
};
static const char *regnames[] =
    { "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
      "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
      "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
      "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra", };

static const char *regnames_HI[] =
    { "HI0", "HI1", "HI2", "HI3", };

static const char *regnames_LO[] =
    { "LO0", "LO1", "LO2", "LO3", };

static const char *regnames_ACX[] =
    { "ACX0", "ACX1", "ACX2", "ACX3", };

static const char *fregnames[] =
    { "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
      "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
      "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
      "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31", };

static const char *fregnames_64[] =
    { "F0",  "F1",  "F2",  "F3",  "F4",  "F5",  "F6",  "F7",
      "F8",  "F9",  "F10", "F11", "F12", "F13", "F14", "F15",
      "F16", "F17", "F18", "F19", "F20", "F21", "F22", "F23",
      "F24", "F25", "F26", "F27", "F28", "F29", "F30", "F31", };

static const char *fregnames_h[] =
    { "h0",  "h1",  "h2",  "h3",  "h4",  "h5",  "h6",  "h7",
      "h8",  "h9",  "h10", "h11", "h12", "h13", "h14", "h15",
      "h16", "h17", "h18", "h19", "h20", "h21", "h22", "h23",
      "h24", "h25", "h26", "h27", "h28", "h29", "h30", "h31", };
#ifdef MIPS_DEBUG_DISAS
#define MIPS_DEBUG(fmt, args...)                                      \
do {                                                                  \
    if (loglevel & CPU_LOG_TB_IN_ASM) {                               \
        fprintf(logfile, TARGET_FMT_lx ": %08x " fmt "\n",            \
                ctx->pc, ctx->opcode , ##args);                       \
    }                                                                 \
} while (0)
#else
#define MIPS_DEBUG(fmt, args...) do { } while(0)
#endif

#define MIPS_INVAL(op)                                                \
do {                                                                  \
    MIPS_DEBUG("Invalid %s %03x %03x %03x", op, ctx->opcode >> 26,    \
               ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F));     \
} while (0)
/* General purpose registers moves. */
static inline void gen_load_gpr (TCGv t, int reg)
{
    if (reg == 0)
        tcg_gen_movi_tl(t, 0);
    else
        tcg_gen_mov_tl(t, cpu_gpr[reg]);
}

static inline void gen_store_gpr (TCGv t, int reg)
{
    if (reg != 0)
        tcg_gen_mov_tl(cpu_gpr[reg], t);
}

/* Moves to/from ACX register. */
static inline void gen_load_ACX (TCGv t, int reg)
{
    tcg_gen_mov_tl(t, cpu_ACX[reg]);
}

static inline void gen_store_ACX (TCGv t, int reg)
{
    tcg_gen_mov_tl(cpu_ACX[reg], t);
}
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
{
    TCGv r_tmp1 = tcg_temp_new();

    if (from == 0)
        tcg_gen_movi_tl(r_tmp1, 0);
    else {
        TCGv_i32 r_tmp2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        tcg_gen_ld_i32(r_tmp2, cpu_env, offsetof(CPUState, CP0_SRSCtl));
        tcg_gen_shri_i32(r_tmp2, r_tmp2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(r_tmp2, r_tmp2, 0xf);
        tcg_gen_muli_i32(r_tmp2, r_tmp2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, r_tmp2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_ld_tl(r_tmp1, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(r_tmp2);
    }
    gen_store_gpr(r_tmp1, to);
    tcg_temp_free(r_tmp1);
}

static inline void gen_store_srsgpr (int from, int to)
{
    if (to != 0) {
        TCGv r_tmp1 = tcg_temp_new();
        TCGv_i32 r_tmp2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(r_tmp1, from);
        tcg_gen_ld_i32(r_tmp2, cpu_env, offsetof(CPUState, CP0_SRSCtl));
        tcg_gen_shri_i32(r_tmp2, r_tmp2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(r_tmp2, r_tmp2, 0xf);
        tcg_gen_muli_i32(r_tmp2, r_tmp2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, r_tmp2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(r_tmp1, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(r_tmp2);
        tcg_temp_free(r_tmp1);
    }
}
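
/*
 * Note: the shadow register bank is selected by reading SRSCtl.PSS and
 * scaling it by 32 * sizeof(target_ulong), which is then used as a byte
 * offset from cpu_env before indexing with the register number. This relies
 * on the shadow GPR sets being laid out as consecutive arrays of 32
 * target_ulong values in CPUState, as the offset arithmetic above assumes.
 */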
/* Floating point register moves. */
static inline void gen_load_fpr32 (TCGv_i32 t, int reg)
{
    tcg_gen_mov_i32(t, fpu_fpr32[reg]);
}

static inline void gen_store_fpr32 (TCGv_i32 t, int reg)
{
    tcg_gen_mov_i32(fpu_fpr32[reg], t);
}

static inline void gen_load_fpr64 (DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64)
        tcg_gen_mov_i64(t, fpu_fpr64[reg]);
    else
        tcg_gen_concat_i32_i64(t, fpu_fpr32[reg & ~1], fpu_fpr32[reg | 1]);
}

static inline void gen_store_fpr64 (DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64)
        tcg_gen_mov_i64(fpu_fpr64[reg], t);
    else {
        tcg_gen_trunc_i64_i32(fpu_fpr32[reg & ~1], t);
        tcg_gen_shri_i64(t, t, 32);
        tcg_gen_trunc_i64_i32(fpu_fpr32[reg | 1], t);
    }
}

static inline void gen_load_fpr32h (TCGv_i32 t, int reg)
{
    tcg_gen_mov_i32(t, fpu_fpr32h[reg]);
}

static inline void gen_store_fpr32h (TCGv_i32 t, int reg)
{
    tcg_gen_mov_i32(fpu_fpr32h[reg], t);
}

static inline void get_fp_cond (TCGv_i32 t)
{
    TCGv_i32 r_tmp1 = tcg_temp_new_i32();
    TCGv_i32 r_tmp2 = tcg_temp_new_i32();

    tcg_gen_shri_i32(r_tmp2, fpu_fcr31, 24);
    tcg_gen_andi_i32(r_tmp2, r_tmp2, 0xfe);
    tcg_gen_shri_i32(r_tmp1, fpu_fcr31, 23);
    tcg_gen_andi_i32(r_tmp1, r_tmp1, 0x1);
    tcg_gen_or_i32(t, r_tmp1, r_tmp2);
    tcg_temp_free_i32(r_tmp1);
    tcg_temp_free_i32(r_tmp2);
}
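
/*
 * get_fp_cond() packs the eight FPU condition codes into bits 7..0 of t:
 * CC0 lives in FCR31 bit 23 and CC1..CC7 in bits 25..31, hence the two
 * shift/mask steps above.
 */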
#define FOP_CONDS(type, fmt, bits)                                            \
static inline void gen_cmp ## type ## _ ## fmt(int n, TCGv_i##bits a,         \
                                               TCGv_i##bits b, int cc)        \
{                                                                             \
    switch (n) {                                                              \
    case  0: gen_helper_2i(cmp ## type ## _ ## fmt ## _f, a, b, cc);    break;\
    case  1: gen_helper_2i(cmp ## type ## _ ## fmt ## _un, a, b, cc);   break;\
    case  2: gen_helper_2i(cmp ## type ## _ ## fmt ## _eq, a, b, cc);   break;\
    case  3: gen_helper_2i(cmp ## type ## _ ## fmt ## _ueq, a, b, cc);  break;\
    case  4: gen_helper_2i(cmp ## type ## _ ## fmt ## _olt, a, b, cc);  break;\
    case  5: gen_helper_2i(cmp ## type ## _ ## fmt ## _ult, a, b, cc);  break;\
    case  6: gen_helper_2i(cmp ## type ## _ ## fmt ## _ole, a, b, cc);  break;\
    case  7: gen_helper_2i(cmp ## type ## _ ## fmt ## _ule, a, b, cc);  break;\
    case  8: gen_helper_2i(cmp ## type ## _ ## fmt ## _sf, a, b, cc);   break;\
    case  9: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngle, a, b, cc); break;\
    case 10: gen_helper_2i(cmp ## type ## _ ## fmt ## _seq, a, b, cc);  break;\
    case 11: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngl, a, b, cc);  break;\
    case 12: gen_helper_2i(cmp ## type ## _ ## fmt ## _lt, a, b, cc);   break;\
    case 13: gen_helper_2i(cmp ## type ## _ ## fmt ## _nge, a, b, cc);  break;\
    case 14: gen_helper_2i(cmp ## type ## _ ## fmt ## _le, a, b, cc);   break;\
    case 15: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngt, a, b, cc);  break;\
    default: abort();                                                         \
    }                                                                         \
}

FOP_CONDS(, d, 64)
FOP_CONDS(abs, d, 64)
FOP_CONDS(, s, 32)
FOP_CONDS(abs, s, 32)
FOP_CONDS(, ps, 64)
FOP_CONDS(abs, ps, 64)
#undef FOP_CONDS
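
/*
 * Each instantiation provides one generator per c.cond.fmt predicate
 * (0..15). The empty-suffix variants back the ordinary C.cond.fmt compares;
 * the "abs" variants call the cmpabs_* helpers, which compare absolute
 * values (used for the MIPS-3D CABS.cond.fmt compares).
 */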
#define OP_COND(name, cond)                                       \
static inline void glue(gen_op_, name) (TCGv t0, TCGv t1)         \
{                                                                 \
    int l1 = gen_new_label();                                     \
    int l2 = gen_new_label();                                     \
                                                                  \
    tcg_gen_brcond_tl(cond, t0, t1, l1);                          \
    tcg_gen_movi_tl(t0, 0);                                       \
    tcg_gen_br(l2);                                               \
    gen_set_label(l1);                                            \
    tcg_gen_movi_tl(t0, 1);                                       \
    gen_set_label(l2);                                            \
}
OP_COND(eq, TCG_COND_EQ);
OP_COND(ne, TCG_COND_NE);
OP_COND(ge, TCG_COND_GE);
OP_COND(geu, TCG_COND_GEU);
OP_COND(lt, TCG_COND_LT);
OP_COND(ltu, TCG_COND_LTU);
#undef OP_COND

#define OP_CONDI(name, cond)                                      \
static inline void glue(gen_op_, name) (TCGv t, target_ulong val) \
{                                                                 \
    int l1 = gen_new_label();                                     \
    int l2 = gen_new_label();                                     \
                                                                  \
    tcg_gen_brcondi_tl(cond, t, val, l1);                         \
    tcg_gen_movi_tl(t, 0);                                        \
    tcg_gen_br(l2);                                               \
    gen_set_label(l1);                                            \
    tcg_gen_movi_tl(t, 1);                                        \
    gen_set_label(l2);                                            \
}
OP_CONDI(lti, TCG_COND_LT);
OP_CONDI(ltiu, TCG_COND_LTU);
#undef OP_CONDI

#define OP_CONDZ(name, cond)                                      \
static inline void glue(gen_op_, name) (TCGv t)                   \
{                                                                 \
    int l1 = gen_new_label();                                     \
    int l2 = gen_new_label();                                     \
                                                                  \
    tcg_gen_brcondi_tl(cond, t, 0, l1);                           \
    tcg_gen_movi_tl(t, 0);                                        \
    tcg_gen_br(l2);                                               \
    gen_set_label(l1);                                            \
    tcg_gen_movi_tl(t, 1);                                        \
    gen_set_label(l2);                                            \
}
OP_CONDZ(gez, TCG_COND_GE);
OP_CONDZ(gtz, TCG_COND_GT);
OP_CONDZ(lez, TCG_COND_LE);
OP_CONDZ(ltz, TCG_COND_LT);
#undef OP_CONDZ
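
/*
 * These generators materialize a 0/1 comparison result with a branch-over
 * pattern: branch to l1 when the condition holds, otherwise write 0 and
 * skip to l2. The TCG used here has no direct "set on condition" operation,
 * so the value has to be produced through control flow.
 */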
static inline void gen_save_pc(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_PC, pc);
}

static inline void save_cpu_state (DisasContext *ctx, int do_save_pc)
{
#if defined MIPS_DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "hflags %08x saved %08x\n",
                ctx->hflags, ctx->saved_hflags);
    }
#endif
    if (do_save_pc && ctx->pc != ctx->saved_pc) {
        gen_save_pc(ctx->pc);
        ctx->saved_pc = ctx->pc;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        TCGv_i32 r_tmp = tcg_temp_new_i32();

        tcg_gen_movi_i32(r_tmp, ctx->hflags);
        tcg_gen_st_i32(r_tmp, cpu_env, offsetof(CPUState, hflags));
        tcg_temp_free_i32(r_tmp);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK) {
        case MIPS_HFLAG_BR:
            break;
        case MIPS_HFLAG_BC:
        case MIPS_HFLAG_BL:
        case MIPS_HFLAG_B:
            tcg_gen_movi_tl(btarget, ctx->btarget);
            break;
        }
    }
}
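
/*
 * Note: PC and hflags are synchronized lazily. The stores above are only
 * emitted when the translation-time copy differs from what was last written
 * back, which keeps redundant state updates out of the generated code.
 */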
static inline void restore_cpu_state (CPUState *env, DisasContext *ctx)
{
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        ctx->btarget = env->btarget;
        break;
    }
}
static inline void
generate_exception_err (DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
    gen_helper_interrupt_restart();
}

static inline void
generate_exception (DisasContext *ctx, int excp)
{
    save_cpu_state(ctx, 1);
    gen_helper_0i(raise_exception, excp);
    gen_helper_interrupt_restart();
}
/* Addresses computation */
static inline void gen_op_addr_add (DisasContext *ctx, TCGv t0, TCGv t1)
{
    tcg_gen_add_tl(t0, t0, t1);

#if defined(TARGET_MIPS64)
    /* For compatibility with 32-bit code, data reference in user mode
       with Status_UX = 0 should be casted to 32-bit and sign extended.
       See the MIPS64 PRA manual, section 4.10. */
    if (((ctx->hflags & MIPS_HFLAG_KSU) == MIPS_HFLAG_UM) &&
        !(ctx->hflags & MIPS_HFLAG_UX)) {
        tcg_gen_ext32s_i64(t0, t0);
    }
#endif
}
static inline void check_cp0_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
        generate_exception_err(ctx, EXCP_CpU, 1);
}

static inline void check_cp1_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
        generate_exception_err(ctx, EXCP_CpU, 1);
}

/* Verify that the processor is running with COP1X instructions enabled.
   This is associated with the nabla symbol in the MIPS32 and MIPS64
   opcode tables. */
static inline void check_cop1x(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
        generate_exception(ctx, EXCP_RI);
}

/* Verify that the processor is running with 64-bit floating-point
   operations enabled. */
static inline void check_cp1_64bitmode(DisasContext *ctx)
{
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
        generate_exception(ctx, EXCP_RI);
}

/*
 * Verify if floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
 */
static inline void check_cp1_registers(DisasContext *ctx, int regs)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
        generate_exception(ctx, EXCP_RI);
}

/* This code generates a "reserved instruction" exception if the
   CPU does not support the instruction set corresponding to flags. */
static inline void check_insn(CPUState *env, DisasContext *ctx, int flags)
{
    if (unlikely(!(env->insn_flags & flags)))
        generate_exception(ctx, EXCP_RI);
}

/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception(ctx, EXCP_RI);
}
/* load/store instructions. */
#define OP_LD(insn,fname)                                               \
static inline void op_ldst_##insn(TCGv t0, DisasContext *ctx)           \
{                                                                       \
    tcg_gen_qemu_##fname(t0, t0, ctx->mem_idx);                         \
}
OP_LD(lb,ld8s);
OP_LD(lbu,ld8u);
OP_LD(lh,ld16s);
OP_LD(lhu,ld16u);
OP_LD(lw,ld32s);
#if defined(TARGET_MIPS64)
OP_LD(lwu,ld32u);
OP_LD(ld,ld64);
#endif
#undef OP_LD

#define OP_ST(insn,fname)                                               \
static inline void op_ldst_##insn(TCGv t0, TCGv t1, DisasContext *ctx)  \
{                                                                       \
    tcg_gen_qemu_##fname(t1, t0, ctx->mem_idx);                         \
}
OP_ST(sb,st8);
OP_ST(sh,st16);
OP_ST(sw,st32);
#if defined(TARGET_MIPS64)
OP_ST(sd,st64);
#endif
#undef OP_ST

#define OP_LD_ATOMIC(insn,fname)                                        \
static inline void op_ldst_##insn(TCGv t0, TCGv t1, DisasContext *ctx)  \
{                                                                       \
    tcg_gen_mov_tl(t1, t0);                                             \
    tcg_gen_qemu_##fname(t0, t0, ctx->mem_idx);                         \
    tcg_gen_st_tl(t1, cpu_env, offsetof(CPUState, CP0_LLAddr));         \
}
OP_LD_ATOMIC(ll,ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld,ld64);
#endif
#undef OP_LD_ATOMIC

#define OP_ST_ATOMIC(insn,fname,almask)                                 \
static inline void op_ldst_##insn(TCGv t0, TCGv t1, DisasContext *ctx)  \
{                                                                       \
    TCGv r_tmp = tcg_temp_local_new();                                  \
    int l1 = gen_new_label();                                           \
    int l2 = gen_new_label();                                           \
    int l3 = gen_new_label();                                           \
                                                                        \
    tcg_gen_andi_tl(r_tmp, t0, almask);                                 \
    tcg_gen_brcondi_tl(TCG_COND_EQ, r_tmp, 0, l1);                      \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, CP0_BadVAddr));       \
    generate_exception(ctx, EXCP_AdES);                                 \
    gen_set_label(l1);                                                  \
    tcg_gen_ld_tl(r_tmp, cpu_env, offsetof(CPUState, CP0_LLAddr));      \
    tcg_gen_brcond_tl(TCG_COND_NE, t0, r_tmp, l2);                      \
    tcg_gen_qemu_##fname(t1, t0, ctx->mem_idx);                         \
    tcg_gen_movi_tl(t0, 1);                                             \
    tcg_gen_br(l3);                                                     \
    gen_set_label(l2);                                                  \
    tcg_gen_movi_tl(t0, 0);                                             \
    gen_set_label(l3);                                                  \
    tcg_temp_free(r_tmp);                                               \
}
OP_ST_ATOMIC(sc,st32,0x3);
#if defined(TARGET_MIPS64)
OP_ST_ATOMIC(scd,st64,0x7);
#endif
#undef OP_ST_ATOMIC
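
/*
 * LL/SC are emulated with a linked-address check rather than a hardware
 * monitor: the atomic load records the accessed address in CP0_LLAddr, and
 * the conditional store performs the memory write and returns 1 only when
 * the store address still equals CP0_LLAddr, returning 0 otherwise.
 * Misaligned addresses raise AdES first, as generated above.
 */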
static void gen_ldst (DisasContext *ctx, uint32_t opc, int rt,
                      int base, int16_t offset)
{
    const char *opn = "ldst";
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_temp_local_new();

    if (base == 0) {
        tcg_gen_movi_tl(t0, offset);
    } else if (offset == 0) {
        gen_load_gpr(t0, base);
    } else {
        gen_load_gpr(t0, base);
        tcg_gen_movi_tl(t1, offset);
        gen_op_addr_add(ctx, t0, t1);
    }
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */
    switch (opc) {
#if defined(TARGET_MIPS64)
    case OPC_LWU:
        op_ldst_lwu(t0, ctx);
        gen_store_gpr(t0, rt);
        opn = "lwu";
        break;
    case OPC_LD:
        op_ldst_ld(t0, ctx);
        gen_store_gpr(t0, rt);
        opn = "ld";
        break;
    case OPC_LLD:
        op_ldst_lld(t0, t1, ctx);
        gen_store_gpr(t0, rt);
        opn = "lld";
        break;
    case OPC_SD:
        gen_load_gpr(t1, rt);
        op_ldst_sd(t0, t1, ctx);
        opn = "sd";
        break;
    case OPC_SCD:
        save_cpu_state(ctx, 1);
        gen_load_gpr(t1, rt);
        op_ldst_scd(t0, t1, ctx);
        gen_store_gpr(t0, rt);
        opn = "scd";
        break;
    case OPC_LDL:
        save_cpu_state(ctx, 1);
        gen_load_gpr(t1, rt);
        gen_helper_3i(ldl, t1, t0, t1, ctx->mem_idx);
        gen_store_gpr(t1, rt);
        opn = "ldl";
        break;
    case OPC_SDL:
        save_cpu_state(ctx, 1);
        gen_load_gpr(t1, rt);
        gen_helper_2i(sdl, t0, t1, ctx->mem_idx);
        opn = "sdl";
        break;
    case OPC_LDR:
        save_cpu_state(ctx, 1);
        gen_load_gpr(t1, rt);
        gen_helper_3i(ldr, t1, t0, t1, ctx->mem_idx);
        gen_store_gpr(t1, rt);
        opn = "ldr";
        break;
    case OPC_SDR:
        save_cpu_state(ctx, 1);
        gen_load_gpr(t1, rt);
        gen_helper_2i(sdr, t0, t1, ctx->mem_idx);
        opn = "sdr";
        break;
#endif
    case OPC_LW:
        op_ldst_lw(t0, ctx);
        gen_store_gpr(t0, rt);
        opn = "lw";
        break;
    case OPC_SW:
        gen_load_gpr(t1, rt);
        op_ldst_sw(t0, t1, ctx);
        opn = "sw";
        break;
    case OPC_LH:
        op_ldst_lh(t0, ctx);
        gen_store_gpr(t0, rt);
        opn = "lh";
        break;
    case OPC_SH:
        gen_load_gpr(t1, rt);
        op_ldst_sh(t0, t1, ctx);
        opn = "sh";
        break;
    case OPC_LHU:
        op_ldst_lhu(t0, ctx);
        gen_store_gpr(t0, rt);
        opn = "lhu";
        break;
    case OPC_LB:
        op_ldst_lb(t0, ctx);
        gen_store_gpr(t0, rt);
        opn = "lb";
        break;
    case OPC_SB:
        gen_load_gpr(t1, rt);
        op_ldst_sb(t0, t1, ctx);
        opn = "sb";
        break;
    case OPC_LBU:
        op_ldst_lbu(t0, ctx);
        gen_store_gpr(t0, rt);
        opn = "lbu";
        break;
    case OPC_LWL:
        save_cpu_state(ctx, 1);
        gen_load_gpr(t1, rt);
        gen_helper_3i(lwl, t1, t0, t1, ctx->mem_idx);
        gen_store_gpr(t1, rt);
        opn = "lwl";
        break;
    case OPC_SWL:
        save_cpu_state(ctx, 1);
        gen_load_gpr(t1, rt);
        gen_helper_2i(swl, t0, t1, ctx->mem_idx);
        opn = "swl";
        break;
    case OPC_LWR:
        save_cpu_state(ctx, 1);
        gen_load_gpr(t1, rt);
        gen_helper_3i(lwr, t1, t0, t1, ctx->mem_idx);
        gen_store_gpr(t1, rt);
        opn = "lwr";
        break;
    case OPC_SWR:
        save_cpu_state(ctx, 1);
        gen_load_gpr(t1, rt);
        gen_helper_2i(swr, t0, t1, ctx->mem_idx);
        opn = "swr";
        break;
    case OPC_LL:
        op_ldst_ll(t0, t1, ctx);
        gen_store_gpr(t0, rt);
        opn = "ll";
        break;
    case OPC_SC:
        save_cpu_state(ctx, 1);
        gen_load_gpr(t1, rt);
        op_ldst_sc(t0, t1, ctx);
        gen_store_gpr(t0, rt);
        opn = "sc";
        break;
    default:
        MIPS_INVAL(opn);
        generate_exception(ctx, EXCP_RI);
        goto out;
    }
    MIPS_DEBUG("%s %s, %d(%s)", opn, regnames[rt], offset, regnames[base]);
 out:
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
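
/*
 * Note: the unaligned-access instructions (lwl/lwr/swl/swr and their 64-bit
 * counterparts) go through C helpers because they merge bytes with the
 * existing register or memory contents; the CPU state is saved beforehand
 * so a fault taken inside the helper reports a consistent PC.
 */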
/* Load and store */
static void gen_flt_ldst (DisasContext *ctx, uint32_t opc, int ft,
                          int base, int16_t offset)
{
    const char *opn = "flt_ldst";
    TCGv t0 = tcg_temp_local_new();

    if (base == 0) {
        tcg_gen_movi_tl(t0, offset);
    } else if (offset == 0) {
        gen_load_gpr(t0, base);
    } else {
        TCGv t1 = tcg_temp_local_new();

        gen_load_gpr(t0, base);
        tcg_gen_movi_tl(t1, offset);
        gen_op_addr_add(ctx, t0, t1);
        tcg_temp_free(t1);
    }
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */
    switch (opc) {
    case OPC_LWC1:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv t1 = tcg_temp_new();

            tcg_gen_qemu_ld32s(t1, t0, ctx->mem_idx);
            tcg_gen_trunc_tl_i32(fp0, t1);
            gen_store_fpr32(fp0, ft);
            tcg_temp_free(t1);
            tcg_temp_free_i32(fp0);
        }
        opn = "lwc1";
        break;
    case OPC_SWC1:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            TCGv t1 = tcg_temp_new();

            gen_load_fpr32(fp0, ft);
            tcg_gen_extu_i32_tl(t1, fp0);
            tcg_gen_qemu_st32(t1, t0, ctx->mem_idx);
            tcg_temp_free(t1);
            tcg_temp_free_i32(fp0);
        }
        opn = "swc1";
        break;
    case OPC_LDC1:
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();

            tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
            gen_store_fpr64(ctx, fp0, ft);
            tcg_temp_free_i64(fp0);
        }
        opn = "ldc1";
        break;
    case OPC_SDC1:
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();

            gen_load_fpr64(ctx, fp0, ft);
            tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
            tcg_temp_free_i64(fp0);
        }
        opn = "sdc1";
        break;
    default:
        MIPS_INVAL(opn);
        generate_exception(ctx, EXCP_RI);
        goto out;
    }
    MIPS_DEBUG("%s %s, %d(%s)", opn, fregnames[ft], offset, regnames[base]);
 out:
    tcg_temp_free(t0);
}
/* Arithmetic with immediate operand */
static void gen_arith_imm (CPUState *env, DisasContext *ctx, uint32_t opc,
                           int rt, int rs, int16_t imm)
{
    target_ulong uimm;
    const char *opn = "imm arith";
    TCGv t0 = tcg_temp_local_new();

    if (rt == 0 && opc != OPC_ADDI && opc != OPC_DADDI) {
        /* If no destination, treat it as a NOP.
           For addi, we must generate the overflow exception when needed. */
        MIPS_DEBUG("NOP");
        goto out;
    }
    uimm = (uint16_t)imm;
    switch (opc) {
    case OPC_ADDI:
    case OPC_ADDIU:
#if defined(TARGET_MIPS64)
    case OPC_DADDI:
    case OPC_DADDIU:
#endif
    case OPC_SLTI:
    case OPC_SLTIU:
        uimm = (target_long)imm; /* Sign extend to 32/64 bits */
        /* Fall through. */
    case OPC_ANDI:
    case OPC_ORI:
    case OPC_XORI:
        gen_load_gpr(t0, rs);
        break;
    case OPC_LUI:
        tcg_gen_movi_tl(t0, imm << 16);
        break;
    case OPC_SLL:
    case OPC_SRA:
    case OPC_SRL:
#if defined(TARGET_MIPS64)
    case OPC_DSLL:
    case OPC_DSRA:
    case OPC_DSRL:
    case OPC_DSLL32:
    case OPC_DSRA32:
    case OPC_DSRL32:
#endif
        uimm &= 0x1f;
        gen_load_gpr(t0, rs);
        break;
    }
    switch (opc) {
    case OPC_ADDI:
        {
            TCGv r_tmp1 = tcg_temp_new();
            TCGv r_tmp2 = tcg_temp_new();
            int l1 = gen_new_label();

            save_cpu_state(ctx, 1);
            tcg_gen_ext32s_tl(r_tmp1, t0);
            tcg_gen_addi_tl(t0, r_tmp1, uimm);

            tcg_gen_xori_tl(r_tmp1, r_tmp1, ~uimm);
            tcg_gen_xori_tl(r_tmp2, t0, uimm);
            tcg_gen_and_tl(r_tmp1, r_tmp1, r_tmp2);
            tcg_temp_free(r_tmp2);
            tcg_gen_brcondi_tl(TCG_COND_GE, r_tmp1, 0, l1);
            /* operands of same sign, result different sign */
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(l1);
            tcg_temp_free(r_tmp1);

            tcg_gen_ext32s_tl(t0, t0);
        }
        opn = "addi";
        break;
    case OPC_ADDIU:
        tcg_gen_addi_tl(t0, t0, uimm);
        tcg_gen_ext32s_tl(t0, t0);
        opn = "addiu";
        break;
#if defined(TARGET_MIPS64)
    case OPC_DADDI:
        {
            TCGv r_tmp1 = tcg_temp_new();
            TCGv r_tmp2 = tcg_temp_new();
            int l1 = gen_new_label();

            save_cpu_state(ctx, 1);
            tcg_gen_mov_tl(r_tmp1, t0);
            tcg_gen_addi_tl(t0, t0, uimm);

            tcg_gen_xori_tl(r_tmp1, r_tmp1, ~uimm);
            tcg_gen_xori_tl(r_tmp2, t0, uimm);
            tcg_gen_and_tl(r_tmp1, r_tmp1, r_tmp2);
            tcg_temp_free(r_tmp2);
            tcg_gen_brcondi_tl(TCG_COND_GE, r_tmp1, 0, l1);
            /* operands of same sign, result different sign */
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(l1);
            tcg_temp_free(r_tmp1);
        }
        opn = "daddi";
        break;
    case OPC_DADDIU:
        tcg_gen_addi_tl(t0, t0, uimm);
        opn = "daddiu";
        break;
#endif
    case OPC_SLTI:
        gen_op_lti(t0, uimm);
        opn = "slti";
        break;
    case OPC_SLTIU:
        gen_op_ltiu(t0, uimm);
        opn = "sltiu";
        break;
    case OPC_ANDI:
        tcg_gen_andi_tl(t0, t0, uimm);
        opn = "andi";
        break;
    case OPC_ORI:
        tcg_gen_ori_tl(t0, t0, uimm);
        opn = "ori";
        break;
    case OPC_XORI:
        tcg_gen_xori_tl(t0, t0, uimm);
        opn = "xori";
        break;
    case OPC_LUI:
        opn = "lui";
        break;
    case OPC_SLL:
        tcg_gen_shli_tl(t0, t0, uimm);
        tcg_gen_ext32s_tl(t0, t0);
        opn = "sll";
        break;
    case OPC_SRA:
        tcg_gen_ext32s_tl(t0, t0);
        tcg_gen_sari_tl(t0, t0, uimm);
        opn = "sra";
        break;
    case OPC_SRL:
        switch ((ctx->opcode >> 21) & 0x1f) {
        case 0:
            if (uimm != 0) {
                tcg_gen_ext32u_tl(t0, t0);
                tcg_gen_shri_tl(t0, t0, uimm);
            } else {
                tcg_gen_ext32s_tl(t0, t0);
            }
            opn = "srl";
            break;
        case 1:
            /* rotr is decoded as srl on non-R2 CPUs */
            if (env->insn_flags & ISA_MIPS32R2) {
                if (uimm != 0) {
                    TCGv_i32 r_tmp1 = tcg_temp_new_i32();

                    tcg_gen_trunc_tl_i32(r_tmp1, t0);
                    tcg_gen_rotri_i32(r_tmp1, r_tmp1, uimm);
                    tcg_gen_ext_i32_tl(t0, r_tmp1);
                    tcg_temp_free_i32(r_tmp1);
                }
                opn = "rotr";
            } else {
                if (uimm != 0) {
                    tcg_gen_ext32u_tl(t0, t0);
                    tcg_gen_shri_tl(t0, t0, uimm);
                } else {
                    tcg_gen_ext32s_tl(t0, t0);
                }
                opn = "srl";
            }
            break;
        default:
            MIPS_INVAL("invalid srl flag");
            generate_exception(ctx, EXCP_RI);
            break;
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DSLL:
        tcg_gen_shli_tl(t0, t0, uimm);
        opn = "dsll";
        break;
    case OPC_DSRA:
        tcg_gen_sari_tl(t0, t0, uimm);
        opn = "dsra";
        break;
    case OPC_DSRL:
        switch ((ctx->opcode >> 21) & 0x1f) {
        case 0:
            tcg_gen_shri_tl(t0, t0, uimm);
            opn = "dsrl";
            break;
        case 1:
            /* drotr is decoded as dsrl on non-R2 CPUs */
            if (env->insn_flags & ISA_MIPS32R2) {
                if (uimm != 0)
                    tcg_gen_rotri_tl(t0, t0, uimm);
                opn = "drotr";
            } else {
                tcg_gen_shri_tl(t0, t0, uimm);
                opn = "dsrl";
            }
            break;
        default:
            MIPS_INVAL("invalid dsrl flag");
            generate_exception(ctx, EXCP_RI);
            break;
        }
        break;
    case OPC_DSLL32:
        tcg_gen_shli_tl(t0, t0, uimm + 32);
        opn = "dsll32";
        break;
    case OPC_DSRA32:
        tcg_gen_sari_tl(t0, t0, uimm + 32);
        opn = "dsra32";
        break;
    case OPC_DSRL32:
        switch ((ctx->opcode >> 21) & 0x1f) {
        case 0:
            tcg_gen_shri_tl(t0, t0, uimm + 32);
            opn = "dsrl32";
            break;
        case 1:
            /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
            if (env->insn_flags & ISA_MIPS32R2) {
                tcg_gen_rotri_tl(t0, t0, uimm + 32);
                opn = "drotr32";
            } else {
                tcg_gen_shri_tl(t0, t0, uimm + 32);
                opn = "dsrl32";
            }
            break;
        default:
            MIPS_INVAL("invalid dsrl32 flag");
            generate_exception(ctx, EXCP_RI);
            break;
        }
        break;
#endif
    default:
        MIPS_INVAL(opn);
        generate_exception(ctx, EXCP_RI);
        goto out;
    }
    gen_store_gpr(t0, rt);
    MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
 out:
    tcg_temp_free(t0);
}
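
/*
 * The overflow checks above use the usual two's-complement identity: for
 * r = a + b, signed overflow occurred iff a and b have the same sign and r
 * has a different one, i.e. ((a ^ ~b) & (r ^ b)) < 0. That is what the
 * xori/xori/and sequence computes before branching over the EXCP_OVERFLOW
 * exception when the sign bit is clear.
 */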
/* Arithmetic */
static void gen_arith (CPUState *env, DisasContext *ctx, uint32_t opc,
                       int rd, int rs, int rt)
{
    const char *opn = "arith";
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_temp_local_new();

    if (rd == 0 && opc != OPC_ADD && opc != OPC_SUB
       && opc != OPC_DADD && opc != OPC_DSUB) {
        /* If no destination, treat it as a NOP.
           For add & sub, we must generate the overflow exception when needed. */
        MIPS_DEBUG("NOP");
        goto out;
    }

    gen_load_gpr(t0, rs);
    /* Specialcase the conventional move operation. */
    if (rt == 0 && (opc == OPC_ADDU || opc == OPC_DADDU
                    || opc == OPC_SUBU || opc == OPC_DSUBU)) {
        gen_store_gpr(t0, rd);
        goto out;
    }
    gen_load_gpr(t1, rt);
    switch (opc) {
    case OPC_ADD:
        {
            TCGv r_tmp1 = tcg_temp_new();
            TCGv r_tmp2 = tcg_temp_new();
            int l1 = gen_new_label();

            save_cpu_state(ctx, 1);
            tcg_gen_ext32s_tl(r_tmp1, t0);
            tcg_gen_ext32s_tl(r_tmp2, t1);
            tcg_gen_add_tl(t0, r_tmp1, r_tmp2);

            tcg_gen_xor_tl(r_tmp1, r_tmp1, t1);
            tcg_gen_xori_tl(r_tmp1, r_tmp1, -1);
            tcg_gen_xor_tl(r_tmp2, t0, t1);
            tcg_gen_and_tl(r_tmp1, r_tmp1, r_tmp2);
            tcg_temp_free(r_tmp2);
            tcg_gen_brcondi_tl(TCG_COND_GE, r_tmp1, 0, l1);
            /* operands of same sign, result different sign */
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(l1);
            tcg_temp_free(r_tmp1);

            tcg_gen_ext32s_tl(t0, t0);
        }
        opn = "add";
        break;
    case OPC_ADDU:
        tcg_gen_add_tl(t0, t0, t1);
        tcg_gen_ext32s_tl(t0, t0);
        opn = "addu";
        break;
    case OPC_SUB:
        {
            TCGv r_tmp1 = tcg_temp_new();
            TCGv r_tmp2 = tcg_temp_new();
            int l1 = gen_new_label();

            save_cpu_state(ctx, 1);
            tcg_gen_ext32s_tl(r_tmp1, t0);
            tcg_gen_ext32s_tl(r_tmp2, t1);
            tcg_gen_sub_tl(t0, r_tmp1, r_tmp2);

            tcg_gen_xor_tl(r_tmp2, r_tmp1, t1);
            tcg_gen_xor_tl(r_tmp1, r_tmp1, t0);
            tcg_gen_and_tl(r_tmp1, r_tmp1, r_tmp2);
            tcg_temp_free(r_tmp2);
            tcg_gen_brcondi_tl(TCG_COND_GE, r_tmp1, 0, l1);
            /* operands of different sign, first operand and result different sign */
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(l1);
            tcg_temp_free(r_tmp1);

            tcg_gen_ext32s_tl(t0, t0);
        }
        opn = "sub";
        break;
    case OPC_SUBU:
        tcg_gen_sub_tl(t0, t0, t1);
        tcg_gen_ext32s_tl(t0, t0);
        opn = "subu";
        break;
#if defined(TARGET_MIPS64)
    case OPC_DADD:
        {
            TCGv r_tmp1 = tcg_temp_new();
            TCGv r_tmp2 = tcg_temp_new();
            int l1 = gen_new_label();

            save_cpu_state(ctx, 1);
            tcg_gen_mov_tl(r_tmp1, t0);
            tcg_gen_add_tl(t0, t0, t1);

            tcg_gen_xor_tl(r_tmp1, r_tmp1, t1);
            tcg_gen_xori_tl(r_tmp1, r_tmp1, -1);
            tcg_gen_xor_tl(r_tmp2, t0, t1);
            tcg_gen_and_tl(r_tmp1, r_tmp1, r_tmp2);
            tcg_temp_free(r_tmp2);
            tcg_gen_brcondi_tl(TCG_COND_GE, r_tmp1, 0, l1);
            /* operands of same sign, result different sign */
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(l1);
            tcg_temp_free(r_tmp1);
        }
        opn = "dadd";
        break;
    case OPC_DADDU:
        tcg_gen_add_tl(t0, t0, t1);
        opn = "daddu";
        break;
    case OPC_DSUB:
        {
            TCGv r_tmp1 = tcg_temp_new();
            TCGv r_tmp2 = tcg_temp_new();
            int l1 = gen_new_label();

            save_cpu_state(ctx, 1);
            tcg_gen_mov_tl(r_tmp1, t0);
            tcg_gen_sub_tl(t0, t0, t1);

            tcg_gen_xor_tl(r_tmp2, r_tmp1, t1);
            tcg_gen_xor_tl(r_tmp1, r_tmp1, t0);
            tcg_gen_and_tl(r_tmp1, r_tmp1, r_tmp2);
            tcg_temp_free(r_tmp2);
            tcg_gen_brcondi_tl(TCG_COND_GE, r_tmp1, 0, l1);
            /* operands of different sign, first operand and result different sign */
            generate_exception(ctx, EXCP_OVERFLOW);
            gen_set_label(l1);
            tcg_temp_free(r_tmp1);
        }
        opn = "dsub";
        break;
    case OPC_DSUBU:
        tcg_gen_sub_tl(t0, t0, t1);
        opn = "dsubu";
        break;
#endif
    case OPC_SLT:
        gen_op_lt(t0, t1);
        opn = "slt";
        break;
    case OPC_SLTU:
        gen_op_ltu(t0, t1);
        opn = "sltu";
        break;
    case OPC_AND:
        tcg_gen_and_tl(t0, t0, t1);
        opn = "and";
        break;
    case OPC_NOR:
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_not_tl(t0, t0);
        opn = "nor";
        break;
    case OPC_OR:
        tcg_gen_or_tl(t0, t0, t1);
        opn = "or";
        break;
    case OPC_XOR:
        tcg_gen_xor_tl(t0, t0, t1);
        opn = "xor";
        break;
    case OPC_MUL:
        tcg_gen_mul_tl(t0, t0, t1);
        tcg_gen_ext32s_tl(t0, t0);
        opn = "mul";
        break;
    case OPC_MOVN:
        {
            int l1 = gen_new_label();

            tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
            gen_store_gpr(t0, rd);
            gen_set_label(l1);
        }
        opn = "movn";
        goto print;
    case OPC_MOVZ:
        {
            int l1 = gen_new_label();

            tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
            gen_store_gpr(t0, rd);
            gen_set_label(l1);
        }
        opn = "movz";
        goto print;
    case OPC_SLLV:
        tcg_gen_andi_tl(t0, t0, 0x1f);
        tcg_gen_shl_tl(t0, t1, t0);
        tcg_gen_ext32s_tl(t0, t0);
        opn = "sllv";
        break;
    case OPC_SRAV:
        tcg_gen_ext32s_tl(t1, t1);
        tcg_gen_andi_tl(t0, t0, 0x1f);
        tcg_gen_sar_tl(t0, t1, t0);
        opn = "srav";
        break;
    case OPC_SRLV:
        switch ((ctx->opcode >> 6) & 0x1f) {
        case 0:
            tcg_gen_ext32u_tl(t1, t1);
            tcg_gen_andi_tl(t0, t0, 0x1f);
            tcg_gen_shr_tl(t0, t1, t0);
            tcg_gen_ext32s_tl(t0, t0);
            opn = "srlv";
            break;
        case 1:
            /* rotrv is decoded as srlv on non-R2 CPUs */
            if (env->insn_flags & ISA_MIPS32R2) {
                int l1 = gen_new_label();
                int l2 = gen_new_label();

                tcg_gen_andi_tl(t0, t0, 0x1f);
                tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
                {
                    TCGv_i32 r_tmp1 = tcg_temp_new_i32();
                    TCGv_i32 r_tmp2 = tcg_temp_new_i32();

                    tcg_gen_trunc_tl_i32(r_tmp1, t0);
                    tcg_gen_trunc_tl_i32(r_tmp2, t1);
                    tcg_gen_rotr_i32(r_tmp1, r_tmp1, r_tmp2);
                    tcg_gen_ext_i32_tl(t0, r_tmp1);
                    tcg_temp_free_i32(r_tmp1);
                    tcg_temp_free_i32(r_tmp2);
                }
                tcg_gen_br(l2);
                gen_set_label(l1);
                tcg_gen_mov_tl(t0, t1);
                gen_set_label(l2);
                opn = "rotrv";
            } else {
                tcg_gen_ext32u_tl(t1, t1);
                tcg_gen_andi_tl(t0, t0, 0x1f);
                tcg_gen_shr_tl(t0, t1, t0);
                tcg_gen_ext32s_tl(t0, t0);
                opn = "srlv";
            }
            break;
        default:
            MIPS_INVAL("invalid srlv flag");
            generate_exception(ctx, EXCP_RI);
            break;
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DSLLV:
        tcg_gen_andi_tl(t0, t0, 0x3f);
        tcg_gen_shl_tl(t0, t1, t0);
        opn = "dsllv";
        break;
    case OPC_DSRAV:
        tcg_gen_andi_tl(t0, t0, 0x3f);
        tcg_gen_sar_tl(t0, t1, t0);
        opn = "dsrav";
        break;
    case OPC_DSRLV:
        switch ((ctx->opcode >> 6) & 0x1f) {
        case 0:
            tcg_gen_andi_tl(t0, t0, 0x3f);
            tcg_gen_shr_tl(t0, t1, t0);
            opn = "dsrlv";
            break;
        case 1:
            /* drotrv is decoded as dsrlv on non-R2 CPUs */
            if (env->insn_flags & ISA_MIPS32R2) {
                int l1 = gen_new_label();
                int l2 = gen_new_label();

                tcg_gen_andi_tl(t0, t0, 0x3f);
                tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
                tcg_gen_rotr_tl(t0, t1, t0);
                tcg_gen_br(l2);
                gen_set_label(l1);
                tcg_gen_mov_tl(t0, t1);
                gen_set_label(l2);
                opn = "drotrv";
            } else {
                tcg_gen_andi_tl(t0, t0, 0x3f);
                tcg_gen_shr_tl(t0, t1, t0);
                opn = "dsrlv";
            }
            break;
        default:
            MIPS_INVAL("invalid dsrlv flag");
            generate_exception(ctx, EXCP_RI);
            break;
        }
        break;
#endif
    default:
        MIPS_INVAL(opn);
        generate_exception(ctx, EXCP_RI);
        goto out;
    }
    gen_store_gpr(t0, rd);
 print:
    MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
 out:
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
/* Arithmetic on HI/LO registers */
static void gen_HILO (DisasContext *ctx, uint32_t opc, int reg)
{
    const char *opn = "hilo";
    TCGv t0 = tcg_temp_local_new();

    if (reg == 0 && (opc == OPC_MFHI || opc == OPC_MFLO)) {
        /* Treat as NOP. */
        MIPS_DEBUG("NOP");
        goto out;
    }
    switch (opc) {
    case OPC_MFHI:
        tcg_gen_mov_tl(t0, cpu_HI[0]);
        gen_store_gpr(t0, reg);
        opn = "mfhi";
        break;
    case OPC_MFLO:
        tcg_gen_mov_tl(t0, cpu_LO[0]);
        gen_store_gpr(t0, reg);
        opn = "mflo";
        break;
    case OPC_MTHI:
        gen_load_gpr(t0, reg);
        tcg_gen_mov_tl(cpu_HI[0], t0);
        opn = "mthi";
        break;
    case OPC_MTLO:
        gen_load_gpr(t0, reg);
        tcg_gen_mov_tl(cpu_LO[0], t0);
        opn = "mtlo";
        break;
    default:
        MIPS_INVAL(opn);
        generate_exception(ctx, EXCP_RI);
        goto out;
    }
    MIPS_DEBUG("%s %s", opn, regnames[reg]);
 out:
    tcg_temp_free(t0);
}
static void gen_muldiv (DisasContext *ctx, uint32_t opc,
                        int rs, int rt)
{
    const char *opn = "mul/div";
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_temp_local_new();

    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    switch (opc) {
    case OPC_DIV:
        {
            int l1 = gen_new_label();

            tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
            {
                int l2 = gen_new_label();
                TCGv_i32 r_tmp1 = tcg_temp_local_new_i32();
                TCGv_i32 r_tmp2 = tcg_temp_local_new_i32();
                TCGv_i32 r_tmp3 = tcg_temp_local_new_i32();

                tcg_gen_trunc_tl_i32(r_tmp1, t0);
                tcg_gen_trunc_tl_i32(r_tmp2, t1);
                tcg_gen_brcondi_i32(TCG_COND_NE, r_tmp1, -1 << 31, l2);
                tcg_gen_brcondi_i32(TCG_COND_NE, r_tmp2, -1, l2);
                tcg_gen_ext32s_tl(cpu_LO[0], t0);
                tcg_gen_movi_tl(cpu_HI[0], 0);
                tcg_gen_br(l1);
                gen_set_label(l2);
                tcg_gen_div_i32(r_tmp3, r_tmp1, r_tmp2);
                tcg_gen_rem_i32(r_tmp2, r_tmp1, r_tmp2);
                tcg_gen_ext_i32_tl(cpu_LO[0], r_tmp3);
                tcg_gen_ext_i32_tl(cpu_HI[0], r_tmp2);
                tcg_temp_free_i32(r_tmp1);
                tcg_temp_free_i32(r_tmp2);
                tcg_temp_free_i32(r_tmp3);
            }
            gen_set_label(l1);
        }
        opn = "div";
        break;
    case OPC_DIVU:
        {
            int l1 = gen_new_label();

            tcg_gen_ext32s_tl(t1, t1);
            tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
            {
                TCGv_i32 r_tmp1 = tcg_temp_new_i32();
                TCGv_i32 r_tmp2 = tcg_temp_new_i32();
                TCGv_i32 r_tmp3 = tcg_temp_new_i32();

                tcg_gen_trunc_tl_i32(r_tmp1, t0);
                tcg_gen_trunc_tl_i32(r_tmp2, t1);
                tcg_gen_divu_i32(r_tmp3, r_tmp1, r_tmp2);
                tcg_gen_remu_i32(r_tmp1, r_tmp1, r_tmp2);
                tcg_gen_ext_i32_tl(cpu_LO[0], r_tmp3);
                tcg_gen_ext_i32_tl(cpu_HI[0], r_tmp1);
                tcg_temp_free_i32(r_tmp1);
                tcg_temp_free_i32(r_tmp2);
                tcg_temp_free_i32(r_tmp3);
            }
            gen_set_label(l1);
        }
        opn = "divu";
        break;
    case OPC_MULT:
        {
            TCGv_i64 r_tmp1 = tcg_temp_new_i64();
            TCGv_i64 r_tmp2 = tcg_temp_new_i64();

            tcg_gen_ext_tl_i64(r_tmp1, t0);
            tcg_gen_ext_tl_i64(r_tmp2, t1);
            tcg_gen_mul_i64(r_tmp1, r_tmp1, r_tmp2);
            tcg_temp_free_i64(r_tmp2);
            tcg_gen_trunc_i64_tl(t0, r_tmp1);
            tcg_gen_shri_i64(r_tmp1, r_tmp1, 32);
            tcg_gen_trunc_i64_tl(t1, r_tmp1);
            tcg_temp_free_i64(r_tmp1);
            tcg_gen_ext32s_tl(cpu_LO[0], t0);
            tcg_gen_ext32s_tl(cpu_HI[0], t1);
        }
        opn = "mult";
        break;
    case OPC_MULTU:
        {
            TCGv_i64 r_tmp1 = tcg_temp_new_i64();
            TCGv_i64 r_tmp2 = tcg_temp_new_i64();

            tcg_gen_ext32u_tl(t0, t0);
            tcg_gen_ext32u_tl(t1, t1);
            tcg_gen_extu_tl_i64(r_tmp1, t0);
            tcg_gen_extu_tl_i64(r_tmp2, t1);
            tcg_gen_mul_i64(r_tmp1, r_tmp1, r_tmp2);
            tcg_temp_free_i64(r_tmp2);
            tcg_gen_trunc_i64_tl(t0, r_tmp1);
            tcg_gen_shri_i64(r_tmp1, r_tmp1, 32);
            tcg_gen_trunc_i64_tl(t1, r_tmp1);
            tcg_temp_free_i64(r_tmp1);
            tcg_gen_ext32s_tl(cpu_LO[0], t0);
            tcg_gen_ext32s_tl(cpu_HI[0], t1);
        }
        opn = "multu";
        break;
#if defined(TARGET_MIPS64)
    case OPC_DDIV:
        {
            int l1 = gen_new_label();

            tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
            {
                int l2 = gen_new_label();

                tcg_gen_brcondi_tl(TCG_COND_NE, t0, -1LL << 63, l2);
                tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1LL, l2);
                tcg_gen_mov_tl(cpu_LO[0], t0);
                tcg_gen_movi_tl(cpu_HI[0], 0);
                tcg_gen_br(l1);
                gen_set_label(l2);
                tcg_gen_div_i64(cpu_LO[0], t0, t1);
                tcg_gen_rem_i64(cpu_HI[0], t0, t1);
            }
            gen_set_label(l1);
        }
        opn = "ddiv";
        break;
    case OPC_DDIVU:
        {
            int l1 = gen_new_label();

            tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
            tcg_gen_divu_i64(cpu_LO[0], t0, t1);
            tcg_gen_remu_i64(cpu_HI[0], t0, t1);
            gen_set_label(l1);
        }
        opn = "ddivu";
        break;
    case OPC_DMULT:
        gen_helper_dmult(t0, t1);
        opn = "dmult";
        break;
    case OPC_DMULTU:
        gen_helper_dmultu(t0, t1);
        opn = "dmultu";
        break;
#endif
    case OPC_MADD:
        {
            TCGv_i64 r_tmp1 = tcg_temp_new_i64();
            TCGv_i64 r_tmp2 = tcg_temp_new_i64();

            tcg_gen_ext_tl_i64(r_tmp1, t0);
            tcg_gen_ext_tl_i64(r_tmp2, t1);
            tcg_gen_mul_i64(r_tmp1, r_tmp1, r_tmp2);
            tcg_gen_concat_tl_i64(r_tmp2, cpu_LO[0], cpu_HI[0]);
            tcg_gen_add_i64(r_tmp1, r_tmp1, r_tmp2);
            tcg_temp_free_i64(r_tmp2);
            tcg_gen_trunc_i64_tl(t0, r_tmp1);
            tcg_gen_shri_i64(r_tmp1, r_tmp1, 32);
            tcg_gen_trunc_i64_tl(t1, r_tmp1);
            tcg_temp_free_i64(r_tmp1);
            tcg_gen_ext32s_tl(cpu_LO[0], t0);
            tcg_gen_ext32s_tl(cpu_HI[0], t1);
        }
        opn = "madd";
        break;
    case OPC_MADDU:
        {
            TCGv_i64 r_tmp1 = tcg_temp_new_i64();
            TCGv_i64 r_tmp2 = tcg_temp_new_i64();

            tcg_gen_ext32u_tl(t0, t0);
            tcg_gen_ext32u_tl(t1, t1);
            tcg_gen_extu_tl_i64(r_tmp1, t0);
            tcg_gen_extu_tl_i64(r_tmp2, t1);
            tcg_gen_mul_i64(r_tmp1, r_tmp1, r_tmp2);
            tcg_gen_concat_tl_i64(r_tmp2, cpu_LO[0], cpu_HI[0]);
            tcg_gen_add_i64(r_tmp1, r_tmp1, r_tmp2);
            tcg_temp_free_i64(r_tmp2);
            tcg_gen_trunc_i64_tl(t0, r_tmp1);
            tcg_gen_shri_i64(r_tmp1, r_tmp1, 32);
            tcg_gen_trunc_i64_tl(t1, r_tmp1);
            tcg_temp_free_i64(r_tmp1);
            tcg_gen_ext32s_tl(cpu_LO[0], t0);
            tcg_gen_ext32s_tl(cpu_HI[0], t1);
        }
        opn = "maddu";
        break;
    case OPC_MSUB:
        {
            TCGv_i64 r_tmp1 = tcg_temp_new_i64();
            TCGv_i64 r_tmp2 = tcg_temp_new_i64();

            tcg_gen_ext_tl_i64(r_tmp1, t0);
            tcg_gen_ext_tl_i64(r_tmp2, t1);
            tcg_gen_mul_i64(r_tmp1, r_tmp1, r_tmp2);
            tcg_gen_concat_tl_i64(r_tmp2, cpu_LO[0], cpu_HI[0]);
            tcg_gen_sub_i64(r_tmp1, r_tmp1, r_tmp2);
            tcg_temp_free_i64(r_tmp2);
            tcg_gen_trunc_i64_tl(t0, r_tmp1);
            tcg_gen_shri_i64(r_tmp1, r_tmp1, 32);
            tcg_gen_trunc_i64_tl(t1, r_tmp1);
            tcg_temp_free_i64(r_tmp1);
            tcg_gen_ext32s_tl(cpu_LO[0], t0);
            tcg_gen_ext32s_tl(cpu_HI[0], t1);
        }
        opn = "msub";
        break;
    case OPC_MSUBU:
        {
            TCGv_i64 r_tmp1 = tcg_temp_new_i64();
            TCGv_i64 r_tmp2 = tcg_temp_new_i64();

            tcg_gen_ext32u_tl(t0, t0);
            tcg_gen_ext32u_tl(t1, t1);
            tcg_gen_extu_tl_i64(r_tmp1, t0);
            tcg_gen_extu_tl_i64(r_tmp2, t1);
            tcg_gen_mul_i64(r_tmp1, r_tmp1, r_tmp2);
            tcg_gen_concat_tl_i64(r_tmp2, cpu_LO[0], cpu_HI[0]);
            tcg_gen_sub_i64(r_tmp1, r_tmp1, r_tmp2);
            tcg_temp_free_i64(r_tmp2);
            tcg_gen_trunc_i64_tl(t0, r_tmp1);
            tcg_gen_shri_i64(r_tmp1, r_tmp1, 32);
            tcg_gen_trunc_i64_tl(t1, r_tmp1);
            tcg_temp_free_i64(r_tmp1);
            tcg_gen_ext32s_tl(cpu_LO[0], t0);
            tcg_gen_ext32s_tl(cpu_HI[0], t1);
        }
        opn = "msubu";
        break;
    default:
        MIPS_INVAL(opn);
        generate_exception(ctx, EXCP_RI);
        goto out;
    }
    MIPS_DEBUG("%s %s %s", opn, regnames[rs], regnames[rt]);
 out:
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
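
/*
 * Note: the divisions special-case the MIPS corner cases inline. A zero
 * divisor skips the HI/LO update entirely (the architected result is
 * UNPREDICTABLE), and the signed INT_MIN / -1 case is forced to
 * LO = INT_MIN, HI = 0 so the host division never overflows.
 */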
static void gen_mul_vr54xx (DisasContext *ctx, uint32_t opc,
                            int rd, int rs, int rt)
{
    const char *opn = "mul vr54xx";
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_temp_local_new();

    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);

    switch (opc) {
    case OPC_VR54XX_MULS:
        gen_helper_muls(t0, t0, t1);
        opn = "muls";
        break;
    case OPC_VR54XX_MULSU:
        gen_helper_mulsu(t0, t0, t1);
        opn = "mulsu";
        break;
    case OPC_VR54XX_MACC:
        gen_helper_macc(t0, t0, t1);
        opn = "macc";
        break;
    case OPC_VR54XX_MACCU:
        gen_helper_maccu(t0, t0, t1);
        opn = "maccu";
        break;
    case OPC_VR54XX_MSAC:
        gen_helper_msac(t0, t0, t1);
        opn = "msac";
        break;
    case OPC_VR54XX_MSACU:
        gen_helper_msacu(t0, t0, t1);
        opn = "msacu";
        break;
    case OPC_VR54XX_MULHI:
        gen_helper_mulhi(t0, t0, t1);
        opn = "mulhi";
        break;
    case OPC_VR54XX_MULHIU:
        gen_helper_mulhiu(t0, t0, t1);
        opn = "mulhiu";
        break;
    case OPC_VR54XX_MULSHI:
        gen_helper_mulshi(t0, t0, t1);
        opn = "mulshi";
        break;
    case OPC_VR54XX_MULSHIU:
        gen_helper_mulshiu(t0, t0, t1);
        opn = "mulshiu";
        break;
    case OPC_VR54XX_MACCHI:
        gen_helper_macchi(t0, t0, t1);
        opn = "macchi";
        break;
    case OPC_VR54XX_MACCHIU:
        gen_helper_macchiu(t0, t0, t1);
        opn = "macchiu";
        break;
    case OPC_VR54XX_MSACHI:
        gen_helper_msachi(t0, t0, t1);
        opn = "msachi";
        break;
    case OPC_VR54XX_MSACHIU:
        gen_helper_msachiu(t0, t0, t1);
        opn = "msachiu";
        break;
    default:
        MIPS_INVAL("mul vr54xx");
        generate_exception(ctx, EXCP_RI);
        goto out;
    }
    gen_store_gpr(t0, rd);
    MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);

 out:
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
static void gen_cl (DisasContext *ctx, uint32_t opc,
                    int rd, int rs)
{
    const char *opn = "CLx";
    TCGv t0 = tcg_temp_local_new();

    if (rd == 0) {
        /* Treat as NOP. */
        MIPS_DEBUG("NOP");
        goto out;
    }
    gen_load_gpr(t0, rs);
    switch (opc) {
    case OPC_CLO:
        gen_helper_clo(t0, t0);
        opn = "clo";
        break;
    case OPC_CLZ:
        gen_helper_clz(t0, t0);
        opn = "clz";
        break;
#if defined(TARGET_MIPS64)
    case OPC_DCLO:
        gen_helper_dclo(t0, t0);
        opn = "dclo";
        break;
    case OPC_DCLZ:
        gen_helper_dclz(t0, t0);
        opn = "dclz";
        break;
#endif
    default:
        MIPS_INVAL(opn);
        generate_exception(ctx, EXCP_RI);
        goto out;
    }
    gen_store_gpr(t0, rd);
    MIPS_DEBUG("%s %s, %s", opn, regnames[rd], regnames[rs]);

 out:
    tcg_temp_free(t0);
}
/* Traps */
static void gen_trap (DisasContext *ctx, uint32_t opc,
                      int rs, int rt, int16_t imm)
{
    int cond;
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_temp_local_new();

    cond = 0;
    /* Load needed operands */
    switch (opc) {
    case OPC_TEQ:
    case OPC_TGE:
    case OPC_TGEU:
    case OPC_TLT:
    case OPC_TLTU:
    case OPC_TNE:
        /* Compare two registers */
        if (rs != rt) {
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
            cond = 1;
        }
        break;
    case OPC_TEQI:
    case OPC_TGEI:
    case OPC_TGEIU:
    case OPC_TLTI:
    case OPC_TLTIU:
    case OPC_TNEI:
        /* Compare register to immediate */
        if (rs != 0 || imm != 0) {
            gen_load_gpr(t0, rs);
            tcg_gen_movi_tl(t1, (int32_t)imm);
            cond = 1;
        }
        break;
    }
    if (cond == 0) {
        switch (opc) {
        case OPC_TEQ:   /* rs == rs */
        case OPC_TEQI:  /* r0 == 0  */
        case OPC_TGE:   /* rs >= rs */
        case OPC_TGEI:  /* r0 >= 0  */
        case OPC_TGEU:  /* rs >= rs unsigned */
        case OPC_TGEIU: /* r0 >= 0  unsigned */
            /* Always trap */
            tcg_gen_movi_tl(t0, 1);
            break;
        case OPC_TLT:   /* rs < rs           */
        case OPC_TLTI:  /* r0 < 0            */
        case OPC_TLTU:  /* rs < rs unsigned  */
        case OPC_TLTIU: /* r0 < 0  unsigned  */
        case OPC_TNE:   /* rs != rs          */
        case OPC_TNEI:  /* r0 != 0           */
            /* Never trap: treat as NOP. */
            goto out;
        default:
            MIPS_INVAL("trap");
            generate_exception(ctx, EXCP_RI);
            goto out;
        }
    } else {
        switch (opc) {
        case OPC_TEQ:
        case OPC_TEQI:
            gen_op_eq(t0, t1);
            break;
        case OPC_TGE:
        case OPC_TGEI:
            gen_op_ge(t0, t1);
            break;
        case OPC_TGEU:
        case OPC_TGEIU:
            gen_op_geu(t0, t1);
            break;
        case OPC_TLT:
        case OPC_TLTI:
            gen_op_lt(t0, t1);
            break;
        case OPC_TLTU:
        case OPC_TLTIU:
            gen_op_ltu(t0, t1);
            break;
        case OPC_TNE:
        case OPC_TNEI:
            gen_op_ne(t0, t1);
            break;
        default:
            MIPS_INVAL("trap");
            generate_exception(ctx, EXCP_RI);
            goto out;
        }
    }
    save_cpu_state(ctx, 1);
    {
        int l1 = gen_new_label();

        tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
        gen_helper_0i(raise_exception, EXCP_TRAP);
        gen_set_label(l1);
    }
    ctx->bstate = BS_STOP;
 out:
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
{
    TranslationBlock *tb;

    tb = ctx->tb;
    if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)) {
        tcg_gen_goto_tb(n);
        gen_save_pc(dest);
        tcg_gen_exit_tb((long)tb + n);
    } else {
        gen_save_pc(dest);
        tcg_gen_exit_tb(0);
    }
}
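
/*
 * Direct block chaining (goto_tb plus exit_tb with a non-zero value) is only
 * used when the branch target lies in the same guest page as the current TB;
 * cross-page targets fall back to exit_tb(0) so the new PC is looked up
 * through the normal slow path.
 */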
/* Branches (before delay slot) */
static void gen_compute_branch (DisasContext *ctx, uint32_t opc,
                                int rs, int rt, int32_t offset)
{
    target_ulong btgt = -1;
    int blink = 0;
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_temp_local_new();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        if (loglevel & CPU_LOG_TB_IN_ASM) {
            fprintf(logfile,
                    "Branch in delay slot at PC 0x" TARGET_FMT_lx "\n",
                    ctx->pc);
        }
#endif
        generate_exception(ctx, EXCP_RI);
        goto out;
    }

    /* Load needed operands */
    switch (opc) {
    case OPC_BEQ:
    case OPC_BEQL:
    case OPC_BNE:
    case OPC_BNEL:
        /* Compare two registers */
        if (rs != rt) {
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
            bcond_compute = 1;
        }
        btgt = ctx->pc + 4 + offset;
        break;
    case OPC_BGEZ:
    case OPC_BGEZAL:
    case OPC_BGEZALL:
    case OPC_BGEZL:
    case OPC_BGTZ:
    case OPC_BGTZL:
    case OPC_BLEZ:
    case OPC_BLEZL:
    case OPC_BLTZ:
    case OPC_BLTZAL:
    case OPC_BLTZALL:
    case OPC_BLTZL:
        /* Compare to zero */
        if (rs != 0) {
            gen_load_gpr(t0, rs);
            bcond_compute = 1;
        }
        btgt = ctx->pc + 4 + offset;
        break;
    case OPC_J:
    case OPC_JAL:
        /* Jump to immediate */
        btgt = ((ctx->pc + 4) & (int32_t)0xF0000000) | (uint32_t)offset;
        break;
    case OPC_JR:
    case OPC_JALR:
        /* Jump to register */
        if (offset != 0 && offset != 16) {
            /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
               others are reserved. */
            MIPS_INVAL("jump hint");
            generate_exception(ctx, EXCP_RI);
            goto out;
        }
        gen_load_gpr(btarget, rs);
        break;
    default:
        MIPS_INVAL("branch/jump");
        generate_exception(ctx, EXCP_RI);
        goto out;
    }
    if (bcond_compute == 0) {
        /* No condition to be computed */
        switch (opc) {
        case OPC_BEQ:     /* rx == rx        */
        case OPC_BEQL:    /* rx == rx likely */
        case OPC_BGEZ:    /* 0 >= 0          */
        case OPC_BGEZL:   /* 0 >= 0 likely   */
        case OPC_BLEZ:    /* 0 <= 0          */
        case OPC_BLEZL:   /* 0 <= 0 likely   */
            /* Always take */
            ctx->hflags |= MIPS_HFLAG_B;
            MIPS_DEBUG("balways");
            break;
        case OPC_BGEZAL:  /* 0 >= 0          */
        case OPC_BGEZALL: /* 0 >= 0 likely   */
            /* Always take and link */
            blink = 31;
            ctx->hflags |= MIPS_HFLAG_B;
            MIPS_DEBUG("balways and link");
            break;
        case OPC_BNE:     /* rx != rx        */
        case OPC_BGTZ:    /* 0 > 0           */
        case OPC_BLTZ:    /* 0 < 0           */
            /* Treat as NOP. */
            MIPS_DEBUG("bnever (NOP)");
            goto out;
        case OPC_BLTZAL:  /* 0 < 0           */
            tcg_gen_movi_tl(t0, ctx->pc + 8);
            gen_store_gpr(t0, 31);
            MIPS_DEBUG("bnever and link");
            goto out;
        case OPC_BLTZALL: /* 0 < 0 likely */
            tcg_gen_movi_tl(t0, ctx->pc + 8);
            gen_store_gpr(t0, 31);
            /* Skip the instruction in the delay slot */
            MIPS_DEBUG("bnever, link and skip");
            ctx->pc += 4;
            goto out;
        case OPC_BNEL:    /* rx != rx likely */
        case OPC_BGTZL:   /* 0 > 0 likely */
        case OPC_BLTZL:   /* 0 < 0 likely */
            /* Skip the instruction in the delay slot */
            MIPS_DEBUG("bnever and skip");
            ctx->pc += 4;
            goto out;
        case OPC_J:
            ctx->hflags |= MIPS_HFLAG_B;
            MIPS_DEBUG("j " TARGET_FMT_lx, btgt);
            break;
        case OPC_JAL:
            blink = 31;
            ctx->hflags |= MIPS_HFLAG_B;
            MIPS_DEBUG("jal " TARGET_FMT_lx, btgt);
            break;
        case OPC_JR:
            ctx->hflags |= MIPS_HFLAG_BR;
            MIPS_DEBUG("jr %s", regnames[rs]);
            break;
        case OPC_JALR:
            blink = rt;
            ctx->hflags |= MIPS_HFLAG_BR;
            MIPS_DEBUG("jalr %s, %s", regnames[rt], regnames[rs]);
            break;
        default:
            MIPS_INVAL("branch/jump");
            generate_exception(ctx, EXCP_RI);
            goto out;
        }
    } else {
        switch (opc) {
        case OPC_BEQ:
            gen_op_eq(t0, t1);
            MIPS_DEBUG("beq %s, %s, " TARGET_FMT_lx,
                       regnames[rs], regnames[rt], btgt);
            goto not_likely;
        case OPC_BEQL:
            gen_op_eq(t0, t1);
            MIPS_DEBUG("beql %s, %s, " TARGET_FMT_lx,
                       regnames[rs], regnames[rt], btgt);
            goto likely;
        case OPC_BNE:
            gen_op_ne(t0, t1);
            MIPS_DEBUG("bne %s, %s, " TARGET_FMT_lx,
                       regnames[rs], regnames[rt], btgt);
            goto not_likely;
        case OPC_BNEL:
            gen_op_ne(t0, t1);
            MIPS_DEBUG("bnel %s, %s, " TARGET_FMT_lx,
                       regnames[rs], regnames[rt], btgt);
            goto likely;
        case OPC_BGEZ:
            gen_op_gez(t0);
            MIPS_DEBUG("bgez %s, " TARGET_FMT_lx, regnames[rs], btgt);
            goto not_likely;
        case OPC_BGEZL:
            gen_op_gez(t0);
            MIPS_DEBUG("bgezl %s, " TARGET_FMT_lx, regnames[rs], btgt);
            goto likely;
        case OPC_BGEZAL:
            gen_op_gez(t0);
            MIPS_DEBUG("bgezal %s, " TARGET_FMT_lx, regnames[rs], btgt);
            blink = 31;
            goto not_likely;
        case OPC_BGEZALL:
            gen_op_gez(t0);
            blink = 31;
            MIPS_DEBUG("bgezall %s, " TARGET_FMT_lx, regnames[rs], btgt);
            goto likely;
        case OPC_BGTZ:
            gen_op_gtz(t0);
            MIPS_DEBUG("bgtz %s, " TARGET_FMT_lx, regnames[rs], btgt);
            goto not_likely;
        case OPC_BGTZL:
            gen_op_gtz(t0);
            MIPS_DEBUG("bgtzl %s, " TARGET_FMT_lx, regnames[rs], btgt);
            goto likely;
        case OPC_BLEZ:
            gen_op_lez(t0);
            MIPS_DEBUG("blez %s, " TARGET_FMT_lx, regnames[rs], btgt);
            goto not_likely;
        case OPC_BLEZL:
            gen_op_lez(t0);
            MIPS_DEBUG("blezl %s, " TARGET_FMT_lx, regnames[rs], btgt);
            goto likely;
        case OPC_BLTZ:
            gen_op_ltz(t0);
            MIPS_DEBUG("bltz %s, " TARGET_FMT_lx, regnames[rs], btgt);
            goto not_likely;
        case OPC_BLTZL:
            gen_op_ltz(t0);
            MIPS_DEBUG("bltzl %s, " TARGET_FMT_lx, regnames[rs], btgt);
            goto likely;
        case OPC_BLTZAL:
            gen_op_ltz(t0);
            blink = 31;
            MIPS_DEBUG("bltzal %s, " TARGET_FMT_lx, regnames[rs], btgt);
        not_likely:
            ctx->hflags |= MIPS_HFLAG_BC;
            tcg_gen_trunc_tl_i32(bcond, t0);
            break;
        case OPC_BLTZALL:
            gen_op_ltz(t0);
            blink = 31;
            MIPS_DEBUG("bltzall %s, " TARGET_FMT_lx, regnames[rs], btgt);
        likely:
            ctx->hflags |= MIPS_HFLAG_BL;
            tcg_gen_trunc_tl_i32(bcond, t0);
            break;
        default:
            MIPS_INVAL("conditional branch/jump");
            generate_exception(ctx, EXCP_RI);
            goto out;
        }
    }
    MIPS_DEBUG("enter ds: link %d cond %02x target " TARGET_FMT_lx,
               blink, ctx->hflags, btgt);

    ctx->btarget = btgt;
    if (blink > 0) {
        tcg_gen_movi_tl(t0, ctx->pc + 8);
        gen_store_gpr(t0, blink);
    }

 out:
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
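
/*
 * Note: branches are translated in two steps. This function only evaluates
 * the condition (into bcond), records the target and link register, and
 * marks the branch kind in ctx->hflags (MIPS_HFLAG_B/BL/BC/BR); the actual
 * control transfer is emitted later, after the delay-slot instruction has
 * been translated, when those hflags are inspected.
 */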
/* special3 bitfield operations */
static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
                        int rs, int lsb, int msb)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    target_ulong mask;

    gen_load_gpr(t1, rs);
    switch (opc) {
    case OPC_EXT:
        if (lsb + msb > 31)
            goto fail;
        tcg_gen_shri_tl(t0, t1, lsb);
        if (msb != 31) {
            tcg_gen_andi_tl(t0, t0, (1 << (msb + 1)) - 1);
        } else {
            tcg_gen_ext32s_tl(t0, t0);
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DEXTM:
        tcg_gen_shri_tl(t0, t1, lsb);
        if (msb != 31) {
            tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1 + 32)) - 1);
        }
        break;
    case OPC_DEXTU:
        tcg_gen_shri_tl(t0, t1, lsb + 32);
        tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
        break;
    case OPC_DEXT:
        tcg_gen_shri_tl(t0, t1, lsb);
        tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
        break;
#endif
    case OPC_INS:
        if (lsb > msb)
            goto fail;
        mask = ((msb - lsb + 1 < 32) ? ((1 << (msb - lsb + 1)) - 1) : ~0) << lsb;
        gen_load_gpr(t0, rt);
        tcg_gen_andi_tl(t0, t0, ~mask);
        tcg_gen_shli_tl(t1, t1, lsb);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_ext32s_tl(t0, t0);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DINSM:
        if (lsb > msb)
            goto fail;
        mask = ((msb - lsb + 1 + 32 < 64) ? ((1ULL << (msb - lsb + 1 + 32)) - 1) : ~0ULL) << lsb;
        gen_load_gpr(t0, rt);
        tcg_gen_andi_tl(t0, t0, ~mask);
        tcg_gen_shli_tl(t1, t1, lsb);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_or_tl(t0, t0, t1);
        break;
    case OPC_DINSU:
        if (lsb > msb)
            goto fail;
        mask = ((1ULL << (msb - lsb + 1)) - 1) << lsb;
        gen_load_gpr(t0, rt);
        tcg_gen_andi_tl(t0, t0, ~mask);
        tcg_gen_shli_tl(t1, t1, lsb + 32);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_or_tl(t0, t0, t1);
        break;
    case OPC_DINS:
        if (lsb > msb)
            goto fail;
        mask = ((1ULL << (msb - lsb + 1)) - 1) << lsb;
        gen_load_gpr(t0, rt);
        tcg_gen_andi_tl(t0, t0, ~mask);
        tcg_gen_shli_tl(t1, t1, lsb);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_or_tl(t0, t0, t1);
        break;
#endif
    default:
fail:
        MIPS_INVAL("bitops");
        generate_exception(ctx, EXCP_RI);
        tcg_temp_free(t0);
        tcg_temp_free(t1);
        return;
    }
    gen_store_gpr(t0, rt);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
2648 static void gen_bshfl (DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
2650 TCGv t0
= tcg_temp_new();
2651 TCGv t1
= tcg_temp_new();
2653 gen_load_gpr(t1
, rt
);
2656 tcg_gen_shri_tl(t0
, t1
, 8);
2657 tcg_gen_andi_tl(t0
, t0
, 0x00FF00FF);
2658 tcg_gen_shli_tl(t1
, t1
, 8);
2659 tcg_gen_andi_tl(t1
, t1
, ~0x00FF00FF);
2660 tcg_gen_or_tl(t0
, t0
, t1
);
2661 tcg_gen_ext32s_tl(t0
, t0
);
2664 tcg_gen_ext8s_tl(t0
, t1
);
2667 tcg_gen_ext16s_tl(t0
, t1
);
2669 #if defined(TARGET_MIPS64)
2671 gen_load_gpr(t1
, rt
);
2672 tcg_gen_shri_tl(t0
, t1
, 8);
2673 tcg_gen_andi_tl(t0
, t0
, 0x00FF00FF00FF00FFULL
);
2674 tcg_gen_shli_tl(t1
, t1
, 8);
2675 tcg_gen_andi_tl(t1
, t1
, ~0x00FF00FF00FF00FFULL
);
2676 tcg_gen_or_tl(t0
, t0
, t1
);
2679 gen_load_gpr(t1
, rt
);
2680 tcg_gen_shri_tl(t0
, t1
, 16);
2681 tcg_gen_andi_tl(t0
, t0
, 0x0000FFFF0000FFFFULL
);
2682 tcg_gen_shli_tl(t1
, t1
, 16);
2683 tcg_gen_andi_tl(t1
, t1
, ~0x0000FFFF0000FFFFULL
);
2684 tcg_gen_or_tl(t1
, t0
, t1
);
2685 tcg_gen_shri_tl(t0
, t1
, 32);
2686 tcg_gen_shli_tl(t1
, t1
, 32);
2687 tcg_gen_or_tl(t0
, t0
, t1
);
2691 MIPS_INVAL("bsfhl");
2692 generate_exception(ctx
, EXCP_RI
);
2697 gen_store_gpr(t0
, rd
);
2702 #ifndef CONFIG_USER_ONLY
2703 /* CP0 (MMU and control) */
2704 static inline void gen_mfc0_load32 (TCGv t
, target_ulong off
)
2706 TCGv_i32 r_tmp
= tcg_temp_new_i32();
2708 tcg_gen_ld_i32(r_tmp
, cpu_env
, off
);
2709 tcg_gen_ext_i32_tl(t
, r_tmp
);
2710 tcg_temp_free_i32(r_tmp
);
2713 static inline void gen_mfc0_load64 (TCGv t
, target_ulong off
)
2715 tcg_gen_ld_tl(t
, cpu_env
, off
);
2716 tcg_gen_ext32s_tl(t
, t
);
2719 static inline void gen_mtc0_store32 (TCGv t
, target_ulong off
)
2721 TCGv_i32 r_tmp
= tcg_temp_new_i32();
2723 tcg_gen_trunc_tl_i32(r_tmp
, t
);
2724 tcg_gen_st_i32(r_tmp
, cpu_env
, off
);
2725 tcg_temp_free_i32(r_tmp
);
2728 static inline void gen_mtc0_store64 (TCGv t
, target_ulong off
)
2730 tcg_gen_ext32s_tl(t
, t
);
2731 tcg_gen_st_tl(t
, cpu_env
, off
);
2734 static void gen_mfc0 (CPUState
*env
, DisasContext
*ctx
, TCGv t0
, int reg
, int sel
)
2736 const char *rn
= "invalid";
2739 check_insn(env
, ctx
, ISA_MIPS32
);
2745 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Index
));
2749 check_insn(env
, ctx
, ASE_MT
);
2750 gen_helper_mfc0_mvpcontrol(t0
);
2754 check_insn(env
, ctx
, ASE_MT
);
2755 gen_helper_mfc0_mvpconf0(t0
);
2759 check_insn(env
, ctx
, ASE_MT
);
2760 gen_helper_mfc0_mvpconf1(t0
);
2770 gen_helper_mfc0_random(t0
);
2774 check_insn(env
, ctx
, ASE_MT
);
2775 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_VPEControl
));
2779 check_insn(env
, ctx
, ASE_MT
);
2780 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_VPEConf0
));
2784 check_insn(env
, ctx
, ASE_MT
);
2785 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_VPEConf1
));
2789 check_insn(env
, ctx
, ASE_MT
);
2790 gen_mfc0_load64(t0
, offsetof(CPUState
, CP0_YQMask
));
2794 check_insn(env
, ctx
, ASE_MT
);
2795 gen_mfc0_load64(t0
, offsetof(CPUState
, CP0_VPESchedule
));
2799 check_insn(env
, ctx
, ASE_MT
);
2800 gen_mfc0_load64(t0
, offsetof(CPUState
, CP0_VPEScheFBack
));
2801 rn
= "VPEScheFBack";
2804 check_insn(env
, ctx
, ASE_MT
);
2805 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_VPEOpt
));
2815 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_EntryLo0
));
2816 tcg_gen_ext32s_tl(t0
, t0
);
2820 check_insn(env
, ctx
, ASE_MT
);
2821 gen_helper_mfc0_tcstatus(t0
);
2825 check_insn(env
, ctx
, ASE_MT
);
2826 gen_helper_mfc0_tcbind(t0
);
2830 check_insn(env
, ctx
, ASE_MT
);
2831 gen_helper_mfc0_tcrestart(t0
);
2835 check_insn(env
, ctx
, ASE_MT
);
2836 gen_helper_mfc0_tchalt(t0
);
2840 check_insn(env
, ctx
, ASE_MT
);
2841 gen_helper_mfc0_tccontext(t0
);
2845 check_insn(env
, ctx
, ASE_MT
);
2846 gen_helper_mfc0_tcschedule(t0
);
2850 check_insn(env
, ctx
, ASE_MT
);
2851 gen_helper_mfc0_tcschefback(t0
);
2861 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_EntryLo1
));
2862 tcg_gen_ext32s_tl(t0
, t0
);
2872 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_Context
));
2873 tcg_gen_ext32s_tl(t0
, t0
);
2877 // gen_helper_mfc0_contextconfig(t0); /* SmartMIPS ASE */
2878 rn
= "ContextConfig";
2887 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_PageMask
));
2891 check_insn(env
, ctx
, ISA_MIPS32R2
);
2892 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_PageGrain
));
2902 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Wired
));
2906 check_insn(env
, ctx
, ISA_MIPS32R2
);
2907 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_SRSConf0
));
2911 check_insn(env
, ctx
, ISA_MIPS32R2
);
2912 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_SRSConf1
));
2916 check_insn(env
, ctx
, ISA_MIPS32R2
);
2917 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_SRSConf2
));
2921 check_insn(env
, ctx
, ISA_MIPS32R2
);
2922 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_SRSConf3
));
2926 check_insn(env
, ctx
, ISA_MIPS32R2
);
2927 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_SRSConf4
));
2937 check_insn(env
, ctx
, ISA_MIPS32R2
);
2938 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_HWREna
));
2948 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_BadVAddr
));
2949 tcg_gen_ext32s_tl(t0
, t0
);
2959 /* Mark as an IO operation because we read the time. */
2962 gen_helper_mfc0_count(t0
);
2965 ctx
->bstate
= BS_STOP
;
2969 /* 6,7 are implementation dependent */
2977 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_EntryHi
));
2978 tcg_gen_ext32s_tl(t0
, t0
);
2988 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Compare
));
2991 /* 6,7 are implementation dependent */
2999 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Status
));
3003 check_insn(env
, ctx
, ISA_MIPS32R2
);
3004 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_IntCtl
));
3008 check_insn(env
, ctx
, ISA_MIPS32R2
);
3009 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_SRSCtl
));
3013 check_insn(env
, ctx
, ISA_MIPS32R2
);
3014 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_SRSMap
));
3024 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Cause
));
3034 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_EPC
));
3035 tcg_gen_ext32s_tl(t0
, t0
);
3045 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_PRid
));
3049 check_insn(env
, ctx
, ISA_MIPS32R2
);
3050 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_EBase
));
3060 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Config0
));
3064 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Config1
));
3068 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Config2
));
3072 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Config3
));
3075 /* 4,5 are reserved */
3076 /* 6,7 are implementation dependent */
3078 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Config6
));
3082 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Config7
));
3092 gen_helper_mfc0_lladdr(t0
);
3102 gen_helper_1i(mfc0_watchlo
, t0
, sel
);
3112 gen_helper_1i(mfc0_watchhi
, t0
, sel
);
3122 #if defined(TARGET_MIPS64)
3123 check_insn(env
, ctx
, ISA_MIPS3
);
3124 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_XContext
));
3125 tcg_gen_ext32s_tl(t0
, t0
);
3134 /* Officially reserved, but sel 0 is used for R1x000 framemask */
3137 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Framemask
));
3145 tcg_gen_movi_tl(t0
, 0); /* unimplemented */
3146 rn
= "'Diagnostic"; /* implementation dependent */
3151 gen_helper_mfc0_debug(t0
); /* EJTAG support */
3155 // gen_helper_mfc0_tracecontrol(t0); /* PDtrace support */
3156 rn
= "TraceControl";
3159 // gen_helper_mfc0_tracecontrol2(t0); /* PDtrace support */
3160 rn
= "TraceControl2";
3163 // gen_helper_mfc0_usertracedata(t0); /* PDtrace support */
3164 rn
= "UserTraceData";
3167 // gen_helper_mfc0_tracebpc(t0); /* PDtrace support */
3178 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_DEPC
));
3179 tcg_gen_ext32s_tl(t0
, t0
);
3189 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Performance0
));
3190 rn
= "Performance0";
3193 // gen_helper_mfc0_performance1(t0);
3194 rn
= "Performance1";
3197 // gen_helper_mfc0_performance2(t0);
3198 rn
= "Performance2";
3201 // gen_helper_mfc0_performance3(t0);
3202 rn
= "Performance3";
3205 // gen_helper_mfc0_performance4(t0);
3206 rn
= "Performance4";
3209 // gen_helper_mfc0_performance5(t0);
3210 rn
= "Performance5";
3213 // gen_helper_mfc0_performance6(t0);
3214 rn
= "Performance6";
3217 // gen_helper_mfc0_performance7(t0);
3218 rn
= "Performance7";
3225 tcg_gen_movi_tl(t0
, 0); /* unimplemented */
3231 tcg_gen_movi_tl(t0
, 0); /* unimplemented */
3244 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_TagLo
));
3251 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_DataLo
));
3264 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_TagHi
));
3271 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_DataHi
));
3281 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_ErrorEPC
));
3282 tcg_gen_ext32s_tl(t0
, t0
);
3293 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_DESAVE
));
3303 #if defined MIPS_DEBUG_DISAS
3304 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
3305 fprintf(logfile
, "mfc0 %s (reg %d sel %d)\n",
3312 #if defined MIPS_DEBUG_DISAS
3313 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
3314 fprintf(logfile
, "mfc0 %s (reg %d sel %d)\n",
3318 generate_exception(ctx
, EXCP_RI
);
3321 static void gen_mtc0 (CPUState
*env
, DisasContext
*ctx
, TCGv t0
, int reg
, int sel
)
3323 const char *rn
= "invalid";
3326 check_insn(env
, ctx
, ISA_MIPS32
);
3335 gen_helper_mtc0_index(t0
);
3339 check_insn(env
, ctx
, ASE_MT
);
3340 gen_helper_mtc0_mvpcontrol(t0
);
3344 check_insn(env
, ctx
, ASE_MT
);
3349 check_insn(env
, ctx
, ASE_MT
);
3364 check_insn(env
, ctx
, ASE_MT
);
3365 gen_helper_mtc0_vpecontrol(t0
);
3369 check_insn(env
, ctx
, ASE_MT
);
3370 gen_helper_mtc0_vpeconf0(t0
);
3374 check_insn(env
, ctx
, ASE_MT
);
3375 gen_helper_mtc0_vpeconf1(t0
);
3379 check_insn(env
, ctx
, ASE_MT
);
3380 gen_helper_mtc0_yqmask(t0
);
3384 check_insn(env
, ctx
, ASE_MT
);
3385 gen_mtc0_store64(t0
, offsetof(CPUState
, CP0_VPESchedule
));
3389 check_insn(env
, ctx
, ASE_MT
);
3390 gen_mtc0_store64(t0
, offsetof(CPUState
, CP0_VPEScheFBack
));
3391 rn
= "VPEScheFBack";
3394 check_insn(env
, ctx
, ASE_MT
);
3395 gen_helper_mtc0_vpeopt(t0
);
3405 gen_helper_mtc0_entrylo0(t0
);
3409 check_insn(env
, ctx
, ASE_MT
);
3410 gen_helper_mtc0_tcstatus(t0
);
3414 check_insn(env
, ctx
, ASE_MT
);
3415 gen_helper_mtc0_tcbind(t0
);
3419 check_insn(env
, ctx
, ASE_MT
);
3420 gen_helper_mtc0_tcrestart(t0
);
3424 check_insn(env
, ctx
, ASE_MT
);
3425 gen_helper_mtc0_tchalt(t0
);
3429 check_insn(env
, ctx
, ASE_MT
);
3430 gen_helper_mtc0_tccontext(t0
);
3434 check_insn(env
, ctx
, ASE_MT
);
3435 gen_helper_mtc0_tcschedule(t0
);
3439 check_insn(env
, ctx
, ASE_MT
);
3440 gen_helper_mtc0_tcschefback(t0
);
3450 gen_helper_mtc0_entrylo1(t0
);
3460 gen_helper_mtc0_context(t0
);
3464 // gen_helper_mtc0_contextconfig(t0); /* SmartMIPS ASE */
3465 rn
= "ContextConfig";
3474 gen_helper_mtc0_pagemask(t0
);
3478 check_insn(env
, ctx
, ISA_MIPS32R2
);
3479 gen_helper_mtc0_pagegrain(t0
);
3489 gen_helper_mtc0_wired(t0
);
3493 check_insn(env
, ctx
, ISA_MIPS32R2
);
3494 gen_helper_mtc0_srsconf0(t0
);
3498 check_insn(env
, ctx
, ISA_MIPS32R2
);
3499 gen_helper_mtc0_srsconf1(t0
);
3503 check_insn(env
, ctx
, ISA_MIPS32R2
);
3504 gen_helper_mtc0_srsconf2(t0
);
3508 check_insn(env
, ctx
, ISA_MIPS32R2
);
3509 gen_helper_mtc0_srsconf3(t0
);
3513 check_insn(env
, ctx
, ISA_MIPS32R2
);
3514 gen_helper_mtc0_srsconf4(t0
);
3524 check_insn(env
, ctx
, ISA_MIPS32R2
);
3525 gen_helper_mtc0_hwrena(t0
);
3539 gen_helper_mtc0_count(t0
);
3542 /* 6,7 are implementation dependent */
3546 /* Stop translation as we may have switched the execution mode */
3547 ctx
->bstate
= BS_STOP
;
3552 gen_helper_mtc0_entryhi(t0
);
3562 gen_helper_mtc0_compare(t0
);
3565 /* 6,7 are implementation dependent */
3569 /* Stop translation as we may have switched the execution mode */
3570 ctx
->bstate
= BS_STOP
;
3575 gen_helper_mtc0_status(t0
);
3576 /* BS_STOP isn't good enough here, hflags may have changed. */
3577 gen_save_pc(ctx
->pc
+ 4);
3578 ctx
->bstate
= BS_EXCP
;
3582 check_insn(env
, ctx
, ISA_MIPS32R2
);
3583 gen_helper_mtc0_intctl(t0
);
3584 /* Stop translation as we may have switched the execution mode */
3585 ctx
->bstate
= BS_STOP
;
3589 check_insn(env
, ctx
, ISA_MIPS32R2
);
3590 gen_helper_mtc0_srsctl(t0
);
3591 /* Stop translation as we may have switched the execution mode */
3592 ctx
->bstate
= BS_STOP
;
3596 check_insn(env
, ctx
, ISA_MIPS32R2
);
3597 gen_mtc0_store32(t0
, offsetof(CPUState
, CP0_SRSMap
));
3598 /* Stop translation as we may have switched the execution mode */
3599 ctx
->bstate
= BS_STOP
;
3609 gen_helper_mtc0_cause(t0
);
3615 /* Stop translation as we may have switched the execution mode */
3616 ctx
->bstate
= BS_STOP
;
3621 gen_mtc0_store64(t0
, offsetof(CPUState
, CP0_EPC
));
3635 check_insn(env
, ctx
, ISA_MIPS32R2
);
3636 gen_helper_mtc0_ebase(t0
);
3646 gen_helper_mtc0_config0(t0
);
3648 /* Stop translation as we may have switched the execution mode */
3649 ctx
->bstate
= BS_STOP
;
3652 /* ignored, read only */
3656 gen_helper_mtc0_config2(t0
);
3658 /* Stop translation as we may have switched the execution mode */
3659 ctx
->bstate
= BS_STOP
;
3662 /* ignored, read only */
3665 /* 4,5 are reserved */
3666 /* 6,7 are implementation dependent */
3676 rn
= "Invalid config selector";
3693 gen_helper_1i(mtc0_watchlo
, t0
, sel
);
3703 gen_helper_1i(mtc0_watchhi
, t0
, sel
);
3713 #if defined(TARGET_MIPS64)
3714 check_insn(env
, ctx
, ISA_MIPS3
);
3715 gen_helper_mtc0_xcontext(t0
);
3724 /* Officially reserved, but sel 0 is used for R1x000 framemask */
3727 gen_helper_mtc0_framemask(t0
);
3736 rn
= "Diagnostic"; /* implementation dependent */
3741 gen_helper_mtc0_debug(t0
); /* EJTAG support */
3742 /* BS_STOP isn't good enough here, hflags may have changed. */
3743 gen_save_pc(ctx
->pc
+ 4);
3744 ctx
->bstate
= BS_EXCP
;
3748 // gen_helper_mtc0_tracecontrol(t0); /* PDtrace support */
3749 rn
= "TraceControl";
3750 /* Stop translation as we may have switched the execution mode */
3751 ctx
->bstate
= BS_STOP
;
3754 // gen_helper_mtc0_tracecontrol2(t0); /* PDtrace support */
3755 rn
= "TraceControl2";
3756 /* Stop translation as we may have switched the execution mode */
3757 ctx
->bstate
= BS_STOP
;
3760 /* Stop translation as we may have switched the execution mode */
3761 ctx
->bstate
= BS_STOP
;
3762 // gen_helper_mtc0_usertracedata(t0); /* PDtrace support */
3763 rn
= "UserTraceData";
3764 /* Stop translation as we may have switched the execution mode */
3765 ctx
->bstate
= BS_STOP
;
3768 // gen_helper_mtc0_tracebpc(t0); /* PDtrace support */
3769 /* Stop translation as we may have switched the execution mode */
3770 ctx
->bstate
= BS_STOP
;
3781 gen_mtc0_store64(t0
, offsetof(CPUState
, CP0_DEPC
));
3791 gen_helper_mtc0_performance0(t0
);
3792 rn
= "Performance0";
3795 // gen_helper_mtc0_performance1(t0);
3796 rn
= "Performance1";
3799 // gen_helper_mtc0_performance2(t0);
3800 rn
= "Performance2";
3803 // gen_helper_mtc0_performance3(t0);
3804 rn
= "Performance3";
3807 // gen_helper_mtc0_performance4(t0);
3808 rn
= "Performance4";
3811 // gen_helper_mtc0_performance5(t0);
3812 rn
= "Performance5";
3815 // gen_helper_mtc0_performance6(t0);
3816 rn
= "Performance6";
3819 // gen_helper_mtc0_performance7(t0);
3820 rn
= "Performance7";
3846 gen_helper_mtc0_taglo(t0
);
3853 gen_helper_mtc0_datalo(t0
);
3866 gen_helper_mtc0_taghi(t0
);
3873 gen_helper_mtc0_datahi(t0
);
3884 gen_mtc0_store64(t0
, offsetof(CPUState
, CP0_ErrorEPC
));
3895 gen_mtc0_store32(t0
, offsetof(CPUState
, CP0_DESAVE
));
3901 /* Stop translation as we may have switched the execution mode */
3902 ctx
->bstate
= BS_STOP
;
3907 #if defined MIPS_DEBUG_DISAS
3908 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
3909 fprintf(logfile
, "mtc0 %s (reg %d sel %d)\n",
3913 /* For simplicity assume that all writes can cause interrupts. */
3916 ctx
->bstate
= BS_STOP
;
3921 #if defined MIPS_DEBUG_DISAS
3922 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
3923 fprintf(logfile
, "mtc0 %s (reg %d sel %d)\n",
3927 generate_exception(ctx
, EXCP_RI
);
3930 #if defined(TARGET_MIPS64)
3931 static void gen_dmfc0 (CPUState
*env
, DisasContext
*ctx
, TCGv t0
, int reg
, int sel
)
3933 const char *rn
= "invalid";
3936 check_insn(env
, ctx
, ISA_MIPS64
);
3942 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Index
));
3946 check_insn(env
, ctx
, ASE_MT
);
3947 gen_helper_mfc0_mvpcontrol(t0
);
3951 check_insn(env
, ctx
, ASE_MT
);
3952 gen_helper_mfc0_mvpconf0(t0
);
3956 check_insn(env
, ctx
, ASE_MT
);
3957 gen_helper_mfc0_mvpconf1(t0
);
3967 gen_helper_mfc0_random(t0
);
3971 check_insn(env
, ctx
, ASE_MT
);
3972 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_VPEControl
));
3976 check_insn(env
, ctx
, ASE_MT
);
3977 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_VPEConf0
));
3981 check_insn(env
, ctx
, ASE_MT
);
3982 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_VPEConf1
));
3986 check_insn(env
, ctx
, ASE_MT
);
3987 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_YQMask
));
3991 check_insn(env
, ctx
, ASE_MT
);
3992 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_VPESchedule
));
3996 check_insn(env
, ctx
, ASE_MT
);
3997 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_VPEScheFBack
));
3998 rn
= "VPEScheFBack";
4001 check_insn(env
, ctx
, ASE_MT
);
4002 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_VPEOpt
));
4012 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_EntryLo0
));
4016 check_insn(env
, ctx
, ASE_MT
);
4017 gen_helper_mfc0_tcstatus(t0
);
4021 check_insn(env
, ctx
, ASE_MT
);
4022 gen_helper_mfc0_tcbind(t0
);
4026 check_insn(env
, ctx
, ASE_MT
);
4027 gen_helper_dmfc0_tcrestart(t0
);
4031 check_insn(env
, ctx
, ASE_MT
);
4032 gen_helper_dmfc0_tchalt(t0
);
4036 check_insn(env
, ctx
, ASE_MT
);
4037 gen_helper_dmfc0_tccontext(t0
);
4041 check_insn(env
, ctx
, ASE_MT
);
4042 gen_helper_dmfc0_tcschedule(t0
);
4046 check_insn(env
, ctx
, ASE_MT
);
4047 gen_helper_dmfc0_tcschefback(t0
);
4057 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_EntryLo1
));
4067 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_Context
));
4071 // gen_helper_dmfc0_contextconfig(t0); /* SmartMIPS ASE */
4072 rn
= "ContextConfig";
4081 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_PageMask
));
4085 check_insn(env
, ctx
, ISA_MIPS32R2
);
4086 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_PageGrain
));
4096 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Wired
));
4100 check_insn(env
, ctx
, ISA_MIPS32R2
);
4101 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_SRSConf0
));
4105 check_insn(env
, ctx
, ISA_MIPS32R2
);
4106 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_SRSConf1
));
4110 check_insn(env
, ctx
, ISA_MIPS32R2
);
4111 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_SRSConf2
));
4115 check_insn(env
, ctx
, ISA_MIPS32R2
);
4116 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_SRSConf3
));
4120 check_insn(env
, ctx
, ISA_MIPS32R2
);
4121 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_SRSConf4
));
4131 check_insn(env
, ctx
, ISA_MIPS32R2
);
4132 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_HWREna
));
4142 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_BadVAddr
));
4152 /* Mark as an IO operation because we read the time. */
4155 gen_helper_mfc0_count(t0
);
4158 ctx
->bstate
= BS_STOP
;
4162 /* 6,7 are implementation dependent */
4170 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_EntryHi
));
4180 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Compare
));
4183 /* 6,7 are implementation dependent */
4191 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Status
));
4195 check_insn(env
, ctx
, ISA_MIPS32R2
);
4196 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_IntCtl
));
4200 check_insn(env
, ctx
, ISA_MIPS32R2
);
4201 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_SRSCtl
));
4205 check_insn(env
, ctx
, ISA_MIPS32R2
);
4206 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_SRSMap
));
4216 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Cause
));
4226 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_EPC
));
4236 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_PRid
));
4240 check_insn(env
, ctx
, ISA_MIPS32R2
);
4241 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_EBase
));
4251 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Config0
));
4255 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Config1
));
4259 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Config2
));
4263 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Config3
));
4266 /* 6,7 are implementation dependent */
4268 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Config6
));
4272 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Config7
));
4282 gen_helper_dmfc0_lladdr(t0
);
4292 gen_helper_1i(dmfc0_watchlo
, t0
, sel
);
4302 gen_helper_1i(mfc0_watchhi
, t0
, sel
);
4312 check_insn(env
, ctx
, ISA_MIPS3
);
4313 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_XContext
));
4321 /* Officially reserved, but sel 0 is used for R1x000 framemask */
4324 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Framemask
));
4332 tcg_gen_movi_tl(t0
, 0); /* unimplemented */
4333 rn
= "'Diagnostic"; /* implementation dependent */
4338 gen_helper_mfc0_debug(t0
); /* EJTAG support */
4342 // gen_helper_dmfc0_tracecontrol(t0); /* PDtrace support */
4343 rn
= "TraceControl";
4346 // gen_helper_dmfc0_tracecontrol2(t0); /* PDtrace support */
4347 rn
= "TraceControl2";
4350 // gen_helper_dmfc0_usertracedata(t0); /* PDtrace support */
4351 rn
= "UserTraceData";
4354 // gen_helper_dmfc0_tracebpc(t0); /* PDtrace support */
4365 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_DEPC
));
4375 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_Performance0
));
4376 rn
= "Performance0";
4379 // gen_helper_dmfc0_performance1(t0);
4380 rn
= "Performance1";
4383 // gen_helper_dmfc0_performance2(t0);
4384 rn
= "Performance2";
4387 // gen_helper_dmfc0_performance3(t0);
4388 rn
= "Performance3";
4391 // gen_helper_dmfc0_performance4(t0);
4392 rn
= "Performance4";
4395 // gen_helper_dmfc0_performance5(t0);
4396 rn
= "Performance5";
4399 // gen_helper_dmfc0_performance6(t0);
4400 rn
= "Performance6";
4403 // gen_helper_dmfc0_performance7(t0);
4404 rn
= "Performance7";
4411 tcg_gen_movi_tl(t0
, 0); /* unimplemented */
4418 tcg_gen_movi_tl(t0
, 0); /* unimplemented */
4431 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_TagLo
));
4438 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_DataLo
));
4451 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_TagHi
));
4458 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_DataHi
));
4468 tcg_gen_ld_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_ErrorEPC
));
4479 gen_mfc0_load32(t0
, offsetof(CPUState
, CP0_DESAVE
));
4489 #if defined MIPS_DEBUG_DISAS
4490 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
4491 fprintf(logfile
, "dmfc0 %s (reg %d sel %d)\n",
4498 #if defined MIPS_DEBUG_DISAS
4499 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
4500 fprintf(logfile
, "dmfc0 %s (reg %d sel %d)\n",
4504 generate_exception(ctx
, EXCP_RI
);
4507 static void gen_dmtc0 (CPUState
*env
, DisasContext
*ctx
, TCGv t0
, int reg
, int sel
)
4509 const char *rn
= "invalid";
4512 check_insn(env
, ctx
, ISA_MIPS64
);
4521 gen_helper_mtc0_index(t0
);
4525 check_insn(env
, ctx
, ASE_MT
);
4526 gen_helper_mtc0_mvpcontrol(t0
);
4530 check_insn(env
, ctx
, ASE_MT
);
4535 check_insn(env
, ctx
, ASE_MT
);
4550 check_insn(env
, ctx
, ASE_MT
);
4551 gen_helper_mtc0_vpecontrol(t0
);
4555 check_insn(env
, ctx
, ASE_MT
);
4556 gen_helper_mtc0_vpeconf0(t0
);
4560 check_insn(env
, ctx
, ASE_MT
);
4561 gen_helper_mtc0_vpeconf1(t0
);
4565 check_insn(env
, ctx
, ASE_MT
);
4566 gen_helper_mtc0_yqmask(t0
);
4570 check_insn(env
, ctx
, ASE_MT
);
4571 tcg_gen_st_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_VPESchedule
));
4575 check_insn(env
, ctx
, ASE_MT
);
4576 tcg_gen_st_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_VPEScheFBack
));
4577 rn
= "VPEScheFBack";
4580 check_insn(env
, ctx
, ASE_MT
);
4581 gen_helper_mtc0_vpeopt(t0
);
4591 gen_helper_mtc0_entrylo0(t0
);
4595 check_insn(env
, ctx
, ASE_MT
);
4596 gen_helper_mtc0_tcstatus(t0
);
4600 check_insn(env
, ctx
, ASE_MT
);
4601 gen_helper_mtc0_tcbind(t0
);
4605 check_insn(env
, ctx
, ASE_MT
);
4606 gen_helper_mtc0_tcrestart(t0
);
4610 check_insn(env
, ctx
, ASE_MT
);
4611 gen_helper_mtc0_tchalt(t0
);
4615 check_insn(env
, ctx
, ASE_MT
);
4616 gen_helper_mtc0_tccontext(t0
);
4620 check_insn(env
, ctx
, ASE_MT
);
4621 gen_helper_mtc0_tcschedule(t0
);
4625 check_insn(env
, ctx
, ASE_MT
);
4626 gen_helper_mtc0_tcschefback(t0
);
4636 gen_helper_mtc0_entrylo1(t0
);
4646 gen_helper_mtc0_context(t0
);
4650 // gen_helper_mtc0_contextconfig(t0); /* SmartMIPS ASE */
4651 rn
= "ContextConfig";
4660 gen_helper_mtc0_pagemask(t0
);
4664 check_insn(env
, ctx
, ISA_MIPS32R2
);
4665 gen_helper_mtc0_pagegrain(t0
);
4675 gen_helper_mtc0_wired(t0
);
4679 check_insn(env
, ctx
, ISA_MIPS32R2
);
4680 gen_helper_mtc0_srsconf0(t0
);
4684 check_insn(env
, ctx
, ISA_MIPS32R2
);
4685 gen_helper_mtc0_srsconf1(t0
);
4689 check_insn(env
, ctx
, ISA_MIPS32R2
);
4690 gen_helper_mtc0_srsconf2(t0
);
4694 check_insn(env
, ctx
, ISA_MIPS32R2
);
4695 gen_helper_mtc0_srsconf3(t0
);
4699 check_insn(env
, ctx
, ISA_MIPS32R2
);
4700 gen_helper_mtc0_srsconf4(t0
);
4710 check_insn(env
, ctx
, ISA_MIPS32R2
);
4711 gen_helper_mtc0_hwrena(t0
);
4725 gen_helper_mtc0_count(t0
);
4728 /* 6,7 are implementation dependent */
4732 /* Stop translation as we may have switched the execution mode */
4733 ctx
->bstate
= BS_STOP
;
4738 gen_helper_mtc0_entryhi(t0
);
4748 gen_helper_mtc0_compare(t0
);
4751 /* 6,7 are implementation dependent */
4755 /* Stop translation as we may have switched the execution mode */
4756 ctx
->bstate
= BS_STOP
;
4761 gen_helper_mtc0_status(t0
);
4762 /* BS_STOP isn't good enough here, hflags may have changed. */
4763 gen_save_pc(ctx
->pc
+ 4);
4764 ctx
->bstate
= BS_EXCP
;
4768 check_insn(env
, ctx
, ISA_MIPS32R2
);
4769 gen_helper_mtc0_intctl(t0
);
4770 /* Stop translation as we may have switched the execution mode */
4771 ctx
->bstate
= BS_STOP
;
4775 check_insn(env
, ctx
, ISA_MIPS32R2
);
4776 gen_helper_mtc0_srsctl(t0
);
4777 /* Stop translation as we may have switched the execution mode */
4778 ctx
->bstate
= BS_STOP
;
4782 check_insn(env
, ctx
, ISA_MIPS32R2
);
4783 gen_mtc0_store32(t0
, offsetof(CPUState
, CP0_SRSMap
));
4784 /* Stop translation as we may have switched the execution mode */
4785 ctx
->bstate
= BS_STOP
;
4795 gen_helper_mtc0_cause(t0
);
4801 /* Stop translation as we may have switched the execution mode */
4802 ctx
->bstate
= BS_STOP
;
4807 tcg_gen_st_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_EPC
));
4821 check_insn(env
, ctx
, ISA_MIPS32R2
);
4822 gen_helper_mtc0_ebase(t0
);
4832 gen_helper_mtc0_config0(t0
);
4834 /* Stop translation as we may have switched the execution mode */
4835 ctx
->bstate
= BS_STOP
;
4842 gen_helper_mtc0_config2(t0
);
4844 /* Stop translation as we may have switched the execution mode */
4845 ctx
->bstate
= BS_STOP
;
4851 /* 6,7 are implementation dependent */
4853 rn
= "Invalid config selector";
4870 gen_helper_1i(mtc0_watchlo
, t0
, sel
);
4880 gen_helper_1i(mtc0_watchhi
, t0
, sel
);
4890 check_insn(env
, ctx
, ISA_MIPS3
);
4891 gen_helper_mtc0_xcontext(t0
);
4899 /* Officially reserved, but sel 0 is used for R1x000 framemask */
4902 gen_helper_mtc0_framemask(t0
);
4911 rn
= "Diagnostic"; /* implementation dependent */
4916 gen_helper_mtc0_debug(t0
); /* EJTAG support */
4917 /* BS_STOP isn't good enough here, hflags may have changed. */
4918 gen_save_pc(ctx
->pc
+ 4);
4919 ctx
->bstate
= BS_EXCP
;
4923 // gen_helper_mtc0_tracecontrol(t0); /* PDtrace support */
4924 /* Stop translation as we may have switched the execution mode */
4925 ctx
->bstate
= BS_STOP
;
4926 rn
= "TraceControl";
4929 // gen_helper_mtc0_tracecontrol2(t0); /* PDtrace support */
4930 /* Stop translation as we may have switched the execution mode */
4931 ctx
->bstate
= BS_STOP
;
4932 rn
= "TraceControl2";
4935 // gen_helper_mtc0_usertracedata(t0); /* PDtrace support */
4936 /* Stop translation as we may have switched the execution mode */
4937 ctx
->bstate
= BS_STOP
;
4938 rn
= "UserTraceData";
4941 // gen_helper_mtc0_tracebpc(t0); /* PDtrace support */
4942 /* Stop translation as we may have switched the execution mode */
4943 ctx
->bstate
= BS_STOP
;
4954 tcg_gen_st_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_DEPC
));
4964 gen_helper_mtc0_performance0(t0
);
4965 rn
= "Performance0";
4968 // gen_helper_mtc0_performance1(t0);
4969 rn
= "Performance1";
4972 // gen_helper_mtc0_performance2(t0);
4973 rn
= "Performance2";
4976 // gen_helper_mtc0_performance3(t0);
4977 rn
= "Performance3";
4980 // gen_helper_mtc0_performance4(t0);
4981 rn
= "Performance4";
4984 // gen_helper_mtc0_performance5(t0);
4985 rn
= "Performance5";
4988 // gen_helper_mtc0_performance6(t0);
4989 rn
= "Performance6";
4992 // gen_helper_mtc0_performance7(t0);
4993 rn
= "Performance7";
5019 gen_helper_mtc0_taglo(t0
);
5026 gen_helper_mtc0_datalo(t0
);
5039 gen_helper_mtc0_taghi(t0
);
5046 gen_helper_mtc0_datahi(t0
);
5057 tcg_gen_st_tl(t0
, cpu_env
, offsetof(CPUState
, CP0_ErrorEPC
));
5068 gen_mtc0_store32(t0
, offsetof(CPUState
, CP0_DESAVE
));
5074 /* Stop translation as we may have switched the execution mode */
5075 ctx
->bstate
= BS_STOP
;
5080 #if defined MIPS_DEBUG_DISAS
5081 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
5082 fprintf(logfile
, "dmtc0 %s (reg %d sel %d)\n",
5086 /* For simplicity assume that all writes can cause interrupts. */
5089 ctx
->bstate
= BS_STOP
;
5094 #if defined MIPS_DEBUG_DISAS
5095 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
5096 fprintf(logfile
, "dmtc0 %s (reg %d sel %d)\n",
5100 generate_exception(ctx
, EXCP_RI
);
5102 #endif /* TARGET_MIPS64 */
5104 static void gen_mftr(CPUState
*env
, DisasContext
*ctx
, int rt
, int rd
,
5105 int u
, int sel
, int h
)
5107 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
5108 TCGv t0
= tcg_temp_local_new();
5110 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
5111 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
5112 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
5113 tcg_gen_movi_tl(t0
, -1);
5114 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
5115 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
5116 tcg_gen_movi_tl(t0
, -1);
5122 gen_helper_mftc0_tcstatus(t0
);
5125 gen_helper_mftc0_tcbind(t0
);
5128 gen_helper_mftc0_tcrestart(t0
);
5131 gen_helper_mftc0_tchalt(t0
);
5134 gen_helper_mftc0_tccontext(t0
);
5137 gen_helper_mftc0_tcschedule(t0
);
5140 gen_helper_mftc0_tcschefback(t0
);
5143 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
5150 gen_helper_mftc0_entryhi(t0
);
5153 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
5159 gen_helper_mftc0_status(t0
);
5162 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
5168 gen_helper_mftc0_debug(t0
);
5171 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
5176 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
5178 } else switch (sel
) {
5179 /* GPR registers. */
5181 gen_helper_1i(mftgpr
, t0
, rt
);
5183 /* Auxiliary CPU registers */
5187 gen_helper_1i(mftlo
, t0
, 0);
5190 gen_helper_1i(mfthi
, t0
, 0);
5193 gen_helper_1i(mftacx
, t0
, 0);
5196 gen_helper_1i(mftlo
, t0
, 1);
5199 gen_helper_1i(mfthi
, t0
, 1);
5202 gen_helper_1i(mftacx
, t0
, 1);
5205 gen_helper_1i(mftlo
, t0
, 2);
5208 gen_helper_1i(mfthi
, t0
, 2);
5211 gen_helper_1i(mftacx
, t0
, 2);
5214 gen_helper_1i(mftlo
, t0
, 3);
5217 gen_helper_1i(mfthi
, t0
, 3);
5220 gen_helper_1i(mftacx
, t0
, 3);
5223 gen_helper_mftdsp(t0
);
5229 /* Floating point (COP1). */
5231 /* XXX: For now we support only a single FPU context. */
5233 TCGv_i32 fp0
= tcg_temp_new_i32();
5235 gen_load_fpr32(fp0
, rt
);
5236 tcg_gen_ext_i32_tl(t0
, fp0
);
5237 tcg_temp_free_i32(fp0
);
5239 TCGv_i32 fp0
= tcg_temp_new_i32();
5241 gen_load_fpr32h(fp0
, rt
);
5242 tcg_gen_ext_i32_tl(t0
, fp0
);
5243 tcg_temp_free_i32(fp0
);
5247 /* XXX: For now we support only a single FPU context. */
5248 gen_helper_1i(cfc1
, t0
, rt
);
5250 /* COP2: Not implemented. */
5257 #if defined MIPS_DEBUG_DISAS
5258 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
5259 fprintf(logfile
, "mftr (reg %d u %d sel %d h %d)\n",
5263 gen_store_gpr(t0
, rd
);
5269 #if defined MIPS_DEBUG_DISAS
5270 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
5271 fprintf(logfile
, "mftr (reg %d u %d sel %d h %d)\n",
5275 generate_exception(ctx
, EXCP_RI
);
5278 static void gen_mttr(CPUState
*env
, DisasContext
*ctx
, int rd
, int rt
,
5279 int u
, int sel
, int h
)
5281 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
5282 TCGv t0
= tcg_temp_local_new();
5284 gen_load_gpr(t0
, rt
);
5285 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
5286 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
5287 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
5289 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
5290 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
5297 gen_helper_mttc0_tcstatus(t0
);
5300 gen_helper_mttc0_tcbind(t0
);
5303 gen_helper_mttc0_tcrestart(t0
);
5306 gen_helper_mttc0_tchalt(t0
);
5309 gen_helper_mttc0_tccontext(t0
);
5312 gen_helper_mttc0_tcschedule(t0
);
5315 gen_helper_mttc0_tcschefback(t0
);
5318 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
5325 gen_helper_mttc0_entryhi(t0
);
5328 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
5334 gen_helper_mttc0_status(t0
);
5337 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
5343 gen_helper_mttc0_debug(t0
);
5346 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
5351 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
5353 } else switch (sel
) {
5354 /* GPR registers. */
5356 gen_helper_1i(mttgpr
, t0
, rd
);
5358 /* Auxiliary CPU registers */
5362 gen_helper_1i(mttlo
, t0
, 0);
5365 gen_helper_1i(mtthi
, t0
, 0);
5368 gen_helper_1i(mttacx
, t0
, 0);
5371 gen_helper_1i(mttlo
, t0
, 1);
5374 gen_helper_1i(mtthi
, t0
, 1);
5377 gen_helper_1i(mttacx
, t0
, 1);
5380 gen_helper_1i(mttlo
, t0
, 2);
5383 gen_helper_1i(mtthi
, t0
, 2);
5386 gen_helper_1i(mttacx
, t0
, 2);
5389 gen_helper_1i(mttlo
, t0
, 3);
5392 gen_helper_1i(mtthi
, t0
, 3);
5395 gen_helper_1i(mttacx
, t0
, 3);
5398 gen_helper_mttdsp(t0
);
5404 /* Floating point (COP1). */
5406 /* XXX: For now we support only a single FPU context. */
5408 TCGv_i32 fp0
= tcg_temp_new_i32();
5410 tcg_gen_trunc_tl_i32(fp0
, t0
);
5411 gen_store_fpr32(fp0
, rd
);
5412 tcg_temp_free_i32(fp0
);
5414 TCGv_i32 fp0
= tcg_temp_new_i32();
5416 tcg_gen_trunc_tl_i32(fp0
, t0
);
5417 gen_store_fpr32h(fp0
, rd
);
5418 tcg_temp_free_i32(fp0
);
5422 /* XXX: For now we support only a single FPU context. */
5423 gen_helper_1i(ctc1
, t0
, rd
);
5425 /* COP2: Not implemented. */
5432 #if defined MIPS_DEBUG_DISAS
5433 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
5434 fprintf(logfile
, "mttr (reg %d u %d sel %d h %d)\n",
5443 #if defined MIPS_DEBUG_DISAS
5444 if (loglevel
& CPU_LOG_TB_IN_ASM
) {
5445 fprintf(logfile
, "mttr (reg %d u %d sel %d h %d)\n",
5449 generate_exception(ctx
, EXCP_RI
);
5452 static void gen_cp0 (CPUState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
5454 const char *opn
= "ldst";
5463 TCGv t0
= tcg_temp_local_new();
5465 gen_mfc0(env
, ctx
, t0
, rd
, ctx
->opcode
& 0x7);
5466 gen_store_gpr(t0
, rt
);
5473 TCGv t0
= tcg_temp_local_new();
5475 gen_load_gpr(t0
, rt
);
5476 save_cpu_state(ctx
, 1);
5477 gen_mtc0(env
, ctx
, t0
, rd
, ctx
->opcode
& 0x7);
5482 #if defined(TARGET_MIPS64)
5484 check_insn(env
, ctx
, ISA_MIPS3
);
5490 TCGv t0
= tcg_temp_local_new();
5492 gen_dmfc0(env
, ctx
, t0
, rd
, ctx
->opcode
& 0x7);
5493 gen_store_gpr(t0
, rt
);
5499 check_insn(env
, ctx
, ISA_MIPS3
);
5501 TCGv t0
= tcg_temp_local_new();
5503 gen_load_gpr(t0
, rt
);
5504 save_cpu_state(ctx
, 1);
5505 gen_dmtc0(env
, ctx
, t0
, rd
, ctx
->opcode
& 0x7);
5512 check_insn(env
, ctx
, ASE_MT
);
5517 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
5518 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
5522 check_insn(env
, ctx
, ASE_MT
);
5523 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
5524 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
5529 if (!env
->tlb
->do_tlbwi
)
5535 if (!env
->tlb
->do_tlbwr
)
5541 if (!env
->tlb
->do_tlbp
)
5547 if (!env
->tlb
->do_tlbr
)
5553 check_insn(env
, ctx
, ISA_MIPS2
);
5554 save_cpu_state(ctx
, 1);
5556 ctx
->bstate
= BS_EXCP
;
5560 check_insn(env
, ctx
, ISA_MIPS32
);
5561 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
5563 generate_exception(ctx
, EXCP_RI
);
5565 save_cpu_state(ctx
, 1);
5567 ctx
->bstate
= BS_EXCP
;
5572 check_insn(env
, ctx
, ISA_MIPS3
| ISA_MIPS32
);
5573 /* If we get an exception, we want to restart at next instruction */
5575 save_cpu_state(ctx
, 1);
5578 ctx
->bstate
= BS_EXCP
;
5583 generate_exception(ctx
, EXCP_RI
);
5586 MIPS_DEBUG("%s %s %d", opn
, regnames
[rt
], rd
);
5588 #endif /* !CONFIG_USER_ONLY */
5590 /* CP1 Branches (before delay slot) */
5591 static void gen_compute_branch1 (CPUState
*env
, DisasContext
*ctx
, uint32_t op
,
5592 int32_t cc
, int32_t offset
)
5594 target_ulong btarget
;
5595 const char *opn
= "cp1 cond branch";
5596 TCGv_i32 t0
= tcg_temp_new_i32();
5599 check_insn(env
, ctx
, ISA_MIPS4
| ISA_MIPS32
);
5601 btarget
= ctx
->pc
+ 4 + offset
;
5606 int l1
= gen_new_label();
5607 int l2
= gen_new_label();
5610 tcg_gen_andi_i32(t0
, t0
, 0x1 << cc
);
5611 tcg_gen_brcondi_i32(TCG_COND_EQ
, t0
, 0, l1
);
5612 tcg_gen_movi_i32(bcond
, 0);
5615 tcg_gen_movi_i32(bcond
, 1);
5622 int l1
= gen_new_label();
5623 int l2
= gen_new_label();
5626 tcg_gen_andi_i32(t0
, t0
, 0x1 << cc
);
5627 tcg_gen_brcondi_i32(TCG_COND_EQ
, t0
, 0, l1
);
5628 tcg_gen_movi_i32(bcond
, 0);
5631 tcg_gen_movi_i32(bcond
, 1);
5638 int l1
= gen_new_label();
5639 int l2
= gen_new_label();
5642 tcg_gen_andi_i32(t0
, t0
, 0x1 << cc
);
5643 tcg_gen_brcondi_i32(TCG_COND_NE
, t0
, 0, l1
);
5644 tcg_gen_movi_i32(bcond
, 0);
5647 tcg_gen_movi_i32(bcond
, 1);
5654 int l1
= gen_new_label();
5655 int l2
= gen_new_label();
5658 tcg_gen_andi_i32(t0
, t0
, 0x1 << cc
);
5659 tcg_gen_brcondi_i32(TCG_COND_NE
, t0
, 0, l1
);
5660 tcg_gen_movi_i32(bcond
, 0);
5663 tcg_gen_movi_i32(bcond
, 1);
5668 ctx
->hflags
|= MIPS_HFLAG_BL
;
5672 int l1
= gen_new_label();
5673 int l2
= gen_new_label();
5676 tcg_gen_andi_i32(t0
, t0
, 0x3 << cc
);
5677 tcg_gen_brcondi_i32(TCG_COND_EQ
, t0
, 0, l1
);
5678 tcg_gen_movi_i32(bcond
, 0);
5681 tcg_gen_movi_i32(bcond
, 1);
5688 int l1
= gen_new_label();
5689 int l2
= gen_new_label();
5692 tcg_gen_andi_i32(t0
, t0
, 0x3 << cc
);
5693 tcg_gen_brcondi_i32(TCG_COND_NE
, t0
, 0, l1
);
5694 tcg_gen_movi_i32(bcond
, 0);
5697 tcg_gen_movi_i32(bcond
, 1);
5704 int l1
= gen_new_label();
5705 int l2
= gen_new_label();
5708 tcg_gen_andi_i32(t0
, t0
, 0xf << cc
);
5709 tcg_gen_brcondi_i32(TCG_COND_EQ
, t0
, 0, l1
);
5710 tcg_gen_movi_i32(bcond
, 0);
5713 tcg_gen_movi_i32(bcond
, 1);
5720 int l1
= gen_new_label();
5721 int l2
= gen_new_label();
5724 tcg_gen_andi_i32(t0
, t0
, 0xf << cc
);
5725 tcg_gen_brcondi_i32(TCG_COND_NE
, t0
, 0, l1
);
5726 tcg_gen_movi_i32(bcond
, 0);
5729 tcg_gen_movi_i32(bcond
, 1);
5734 ctx
->hflags
|= MIPS_HFLAG_BC
;
5738 generate_exception (ctx
, EXCP_RI
);
5741 MIPS_DEBUG("%s: cond %02x target " TARGET_FMT_lx
, opn
,
5742 ctx
->hflags
, btarget
);
5743 ctx
->btarget
= btarget
;
5746 tcg_temp_free_i32(t0
);
5749 /* Coprocessor 1 (FPU) */
5751 #define FOP(func, fmt) (((fmt) << 21) | (func))
5753 static void gen_cp1 (DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
5755 const char *opn
= "cp1 move";
5756 TCGv t0
= tcg_temp_local_new();
5761 TCGv_i32 fp0
= tcg_temp_new_i32();
5763 gen_load_fpr32(fp0
, fs
);
5764 tcg_gen_ext_i32_tl(t0
, fp0
);
5765 tcg_temp_free_i32(fp0
);
5767 gen_store_gpr(t0
, rt
);
5771 gen_load_gpr(t0
, rt
);
5773 TCGv_i32 fp0
= tcg_temp_new_i32();
5775 tcg_gen_trunc_tl_i32(fp0
, t0
);
5776 gen_store_fpr32(fp0
, fs
);
5777 tcg_temp_free_i32(fp0
);
5782 gen_helper_1i(cfc1
, t0
, fs
);
5783 gen_store_gpr(t0
, rt
);
5787 gen_load_gpr(t0
, rt
);
5788 gen_helper_1i(ctc1
, t0
, fs
);
5793 TCGv_i64 fp0
= tcg_temp_new_i64();
5795 gen_load_fpr64(ctx
, fp0
, fs
);
5796 tcg_gen_trunc_i64_tl(t0
, fp0
);
5797 tcg_temp_free_i64(fp0
);
5799 gen_store_gpr(t0
, rt
);
5803 gen_load_gpr(t0
, rt
);
5805 TCGv_i64 fp0
= tcg_temp_new_i64();
5807 tcg_gen_extu_tl_i64(fp0
, t0
);
5808 gen_store_fpr64(ctx
, fp0
, fs
);
5809 tcg_temp_free_i64(fp0
);
5815 TCGv_i32 fp0
= tcg_temp_new_i32();
5817 gen_load_fpr32h(fp0
, fs
);
5818 tcg_gen_ext_i32_tl(t0
, fp0
);
5819 tcg_temp_free_i32(fp0
);
5821 gen_store_gpr(t0
, rt
);
5825 gen_load_gpr(t0
, rt
);
5827 TCGv_i32 fp0
= tcg_temp_new_i32();
5829 tcg_gen_trunc_tl_i32(fp0
, t0
);
5830 gen_store_fpr32h(fp0
, fs
);
5831 tcg_temp_free_i32(fp0
);
5837 generate_exception (ctx
, EXCP_RI
);
5840 MIPS_DEBUG("%s %s %s", opn
, regnames
[rt
], fregnames
[fs
]);
5846 static void gen_movci (DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
5848 int l1
= gen_new_label();
5851 TCGv t0
= tcg_temp_local_new();
5852 TCGv_i32 r_tmp
= tcg_temp_new_i32();
5855 ccbit
= 1 << (24 + cc
);
5863 gen_load_gpr(t0
, rd
);
5864 tcg_gen_andi_i32(r_tmp
, fpu_fcr31
, ccbit
);
5865 tcg_gen_brcondi_i32(cond
, r_tmp
, 0, l1
);
5866 tcg_temp_free_i32(r_tmp
);
5867 gen_load_gpr(t0
, rs
);
5869 gen_store_gpr(t0
, rd
);
5873 static inline void gen_movcf_s (int fs
, int fd
, int cc
, int tf
)
5877 TCGv_i32 r_tmp1
= tcg_temp_new_i32();
5878 TCGv_i32 fp0
= tcg_temp_local_new_i32();
5879 int l1
= gen_new_label();
5882 ccbit
= 1 << (24 + cc
);
5891 gen_load_fpr32(fp0
, fd
);
5892 tcg_gen_andi_i32(r_tmp1
, fpu_fcr31
, ccbit
);
5893 tcg_gen_brcondi_i32(cond
, r_tmp1
, 0, l1
);
5894 tcg_temp_free_i32(r_tmp1
);
5895 gen_load_fpr32(fp0
, fs
);
5897 gen_store_fpr32(fp0
, fd
);
5898 tcg_temp_free_i32(fp0
);
5901 static inline void gen_movcf_d (DisasContext
*ctx
, int fs
, int fd
, int cc
, int tf
)
5905 TCGv_i32 r_tmp1
= tcg_temp_new_i32();
5906 TCGv_i64 fp0
= tcg_temp_local_new_i64();
5907 int l1
= gen_new_label();
5910 ccbit
= 1 << (24 + cc
);
5919 gen_load_fpr64(ctx
, fp0
, fd
);
5920 tcg_gen_andi_i32(r_tmp1
, fpu_fcr31
, ccbit
);
5921 tcg_gen_brcondi_i32(cond
, r_tmp1
, 0, l1
);
5922 tcg_temp_free_i32(r_tmp1
);
5923 gen_load_fpr64(ctx
, fp0
, fs
);
5925 gen_store_fpr64(ctx
, fp0
, fd
);
5926 tcg_temp_free_i64(fp0
);
5929 static inline void gen_movcf_ps (int fs
, int fd
, int cc
, int tf
)
5931 uint32_t ccbit1
, ccbit2
;
5933 TCGv_i32 r_tmp1
= tcg_temp_new_i32();
5934 TCGv_i32 fp0
= tcg_temp_local_new_i32();
5935 int l1
= gen_new_label();
5936 int l2
= gen_new_label();
5939 ccbit1
= 1 << (24 + cc
);
5940 ccbit2
= 1 << (25 + cc
);
5951 gen_load_fpr32(fp0
, fd
);
5952 tcg_gen_andi_i32(r_tmp1
, fpu_fcr31
, ccbit1
);
5953 tcg_gen_brcondi_i32(cond
, r_tmp1
, 0, l1
);
5954 gen_load_fpr32(fp0
, fs
);
5956 gen_store_fpr32(fp0
, fd
);
5958 gen_load_fpr32h(fp0
, fd
);
5959 tcg_gen_andi_i32(r_tmp1
, fpu_fcr31
, ccbit2
);
5960 tcg_gen_brcondi_i32(cond
, r_tmp1
, 0, l2
);
5961 gen_load_fpr32h(fp0
, fs
);
5963 gen_store_fpr32h(fp0
, fd
);
5965 tcg_temp_free_i32(r_tmp1
);
5966 tcg_temp_free_i32(fp0
);
5970 static void gen_farith (DisasContext
*ctx
, uint32_t op1
,
5971 int ft
, int fs
, int fd
, int cc
)
5973 const char *opn
= "farith";
5974 const char *condnames
[] = {
5992 const char *condnames_abs
[] = {
6010 enum { BINOP
, CMPOP
, OTHEROP
} optype
= OTHEROP
;
6011 uint32_t func
= ctx
->opcode
& 0x3f;
6013 switch (ctx
->opcode
& FOP(0x3f, 0x1f)) {
6016 TCGv_i32 fp0
= tcg_temp_new_i32();
6017 TCGv_i32 fp1
= tcg_temp_new_i32();
6019 gen_load_fpr32(fp0
, fs
);
6020 gen_load_fpr32(fp1
, ft
);
6021 gen_helper_float_add_s(fp0
, fp0
, fp1
);
6022 tcg_temp_free_i32(fp1
);
6023 gen_store_fpr32(fp0
, fd
);
6024 tcg_temp_free_i32(fp0
);
6031 TCGv_i32 fp0
= tcg_temp_new_i32();
6032 TCGv_i32 fp1
= tcg_temp_new_i32();
6034 gen_load_fpr32(fp0
, fs
);
6035 gen_load_fpr32(fp1
, ft
);
6036 gen_helper_float_sub_s(fp0
, fp0
, fp1
);
6037 tcg_temp_free_i32(fp1
);
6038 gen_store_fpr32(fp0
, fd
);
6039 tcg_temp_free_i32(fp0
);
6046 TCGv_i32 fp0
= tcg_temp_new_i32();
6047 TCGv_i32 fp1
= tcg_temp_new_i32();
6049 gen_load_fpr32(fp0
, fs
);
6050 gen_load_fpr32(fp1
, ft
);
6051 gen_helper_float_mul_s(fp0
, fp0
, fp1
);
6052 tcg_temp_free_i32(fp1
);
6053 gen_store_fpr32(fp0
, fd
);
6054 tcg_temp_free_i32(fp0
);
6061 TCGv_i32 fp0
= tcg_temp_new_i32();
6062 TCGv_i32 fp1
= tcg_temp_new_i32();
6064 gen_load_fpr32(fp0
, fs
);
6065 gen_load_fpr32(fp1
, ft
);
6066 gen_helper_float_div_s(fp0
, fp0
, fp1
);
6067 tcg_temp_free_i32(fp1
);
6068 gen_store_fpr32(fp0
, fd
);
6069 tcg_temp_free_i32(fp0
);
6076 TCGv_i32 fp0
= tcg_temp_new_i32();
6078 gen_load_fpr32(fp0
, fs
);
6079 gen_helper_float_sqrt_s(fp0
, fp0
);
6080 gen_store_fpr32(fp0
, fd
);
6081 tcg_temp_free_i32(fp0
);
6087 TCGv_i32 fp0
= tcg_temp_new_i32();
6089 gen_load_fpr32(fp0
, fs
);
6090 gen_helper_float_abs_s(fp0
, fp0
);
6091 gen_store_fpr32(fp0
, fd
);
6092 tcg_temp_free_i32(fp0
);
6098 TCGv_i32 fp0
= tcg_temp_new_i32();
6100 gen_load_fpr32(fp0
, fs
);
6101 gen_store_fpr32(fp0
, fd
);
6102 tcg_temp_free_i32(fp0
);
6108 TCGv_i32 fp0
= tcg_temp_new_i32();
6110 gen_load_fpr32(fp0
, fs
);
6111 gen_helper_float_chs_s(fp0
, fp0
);
6112 gen_store_fpr32(fp0
, fd
);
6113 tcg_temp_free_i32(fp0
);
6118 check_cp1_64bitmode(ctx
);
6120 TCGv_i32 fp32
= tcg_temp_new_i32();
6121 TCGv_i64 fp64
= tcg_temp_new_i64();
6123 gen_load_fpr32(fp32
, fs
);
6124 gen_helper_float_roundl_s(fp64
, fp32
);
6125 tcg_temp_free_i32(fp32
);
6126 gen_store_fpr64(ctx
, fp64
, fd
);
6127 tcg_temp_free_i64(fp64
);
6132 check_cp1_64bitmode(ctx
);
6134 TCGv_i32 fp32
= tcg_temp_new_i32();
6135 TCGv_i64 fp64
= tcg_temp_new_i64();
6137 gen_load_fpr32(fp32
, fs
);
6138 gen_helper_float_truncl_s(fp64
, fp32
);
6139 tcg_temp_free_i32(fp32
);
6140 gen_store_fpr64(ctx
, fp64
, fd
);
6141 tcg_temp_free_i64(fp64
);
6146 check_cp1_64bitmode(ctx
);
6148 TCGv_i32 fp32
= tcg_temp_new_i32();
6149 TCGv_i64 fp64
= tcg_temp_new_i64();
6151 gen_load_fpr32(fp32
, fs
);
6152 gen_helper_float_ceill_s(fp64
, fp32
);
6153 tcg_temp_free_i32(fp32
);
6154 gen_store_fpr64(ctx
, fp64
, fd
);
6155 tcg_temp_free_i64(fp64
);
6160 check_cp1_64bitmode(ctx
);
6162 TCGv_i32 fp32
= tcg_temp_new_i32();
6163 TCGv_i64 fp64
= tcg_temp_new_i64();
6165 gen_load_fpr32(fp32
, fs
);
6166 gen_helper_float_floorl_s(fp64
, fp32
);
6167 tcg_temp_free_i32(fp32
);
6168 gen_store_fpr64(ctx
, fp64
, fd
);
6169 tcg_temp_free_i64(fp64
);
6175 TCGv_i32 fp0
= tcg_temp_new_i32();
6177 gen_load_fpr32(fp0
, fs
);
6178 gen_helper_float_roundw_s(fp0
, fp0
);
6179 gen_store_fpr32(fp0
, fd
);
6180 tcg_temp_free_i32(fp0
);
6186 TCGv_i32 fp0
= tcg_temp_new_i32();
6188 gen_load_fpr32(fp0
, fs
);
6189 gen_helper_float_truncw_s(fp0
, fp0
);
6190 gen_store_fpr32(fp0
, fd
);
6191 tcg_temp_free_i32(fp0
);
6197 TCGv_i32 fp0
= tcg_temp_new_i32();
6199 gen_load_fpr32(fp0
, fs
);
6200 gen_helper_float_ceilw_s(fp0
, fp0
);
6201 gen_store_fpr32(fp0
, fd
);
6202 tcg_temp_free_i32(fp0
);
6208 TCGv_i32 fp0
= tcg_temp_new_i32();
6210 gen_load_fpr32(fp0
, fs
);
6211 gen_helper_float_floorw_s(fp0
, fp0
);
6212 gen_store_fpr32(fp0
, fd
);
6213 tcg_temp_free_i32(fp0
);
6218 gen_movcf_s(fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
6223 int l1
= gen_new_label();
6224 TCGv t0
= tcg_temp_new();
6225 TCGv_i32 fp0
= tcg_temp_local_new_i32();
6227 gen_load_gpr(t0
, ft
);
6228 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
6229 gen_load_fpr32(fp0
, fs
);
6230 gen_store_fpr32(fp0
, fd
);
6231 tcg_temp_free_i32(fp0
);
6239 int l1
= gen_new_label();
6240 TCGv t0
= tcg_temp_new();
6241 TCGv_i32 fp0
= tcg_temp_local_new_i32();
6243 gen_load_gpr(t0
, ft
);
6244 tcg_gen_brcondi_tl(TCG_COND_EQ
, t0
, 0, l1
);
6245 gen_load_fpr32(fp0
, fs
);
6246 gen_store_fpr32(fp0
, fd
);
6247 tcg_temp_free_i32(fp0
);
6256 TCGv_i32 fp0
= tcg_temp_new_i32();
6258 gen_load_fpr32(fp0
, fs
);
6259 gen_helper_float_recip_s(fp0
, fp0
);
6260 gen_store_fpr32(fp0
, fd
);
6261 tcg_temp_free_i32(fp0
);
6268 TCGv_i32 fp0
= tcg_temp_new_i32();
6270 gen_load_fpr32(fp0
, fs
);
6271 gen_helper_float_rsqrt_s(fp0
, fp0
);
6272 gen_store_fpr32(fp0
, fd
);
6273 tcg_temp_free_i32(fp0
);
6278 check_cp1_64bitmode(ctx
);
6280 TCGv_i32 fp0
= tcg_temp_new_i32();
6281 TCGv_i32 fp1
= tcg_temp_new_i32();
6283 gen_load_fpr32(fp0
, fs
);
6284 gen_load_fpr32(fp1
, fd
);
6285 gen_helper_float_recip2_s(fp0
, fp0
, fp1
);
6286 tcg_temp_free_i32(fp1
);
6287 gen_store_fpr32(fp0
, fd
);
6288 tcg_temp_free_i32(fp0
);
6293 check_cp1_64bitmode(ctx
);
6295 TCGv_i32 fp0
= tcg_temp_new_i32();
6297 gen_load_fpr32(fp0
, fs
);
6298 gen_helper_float_recip1_s(fp0
, fp0
);
6299 gen_store_fpr32(fp0
, fd
);
6300 tcg_temp_free_i32(fp0
);
6305 check_cp1_64bitmode(ctx
);
6307 TCGv_i32 fp0
= tcg_temp_new_i32();
6309 gen_load_fpr32(fp0
, fs
);
6310 gen_helper_float_rsqrt1_s(fp0
, fp0
);
6311 gen_store_fpr32(fp0
, fd
);
6312 tcg_temp_free_i32(fp0
);
6317 check_cp1_64bitmode(ctx
);
6319 TCGv_i32 fp0
= tcg_temp_new_i32();
6320 TCGv_i32 fp1
= tcg_temp_new_i32();
6322 gen_load_fpr32(fp0
, fs
);
6323 gen_load_fpr32(fp1
, ft
);
6324 gen_helper_float_rsqrt2_s(fp0
, fp0
, fp1
);
6325 tcg_temp_free_i32(fp1
);
6326 gen_store_fpr32(fp0
, fd
);
6327 tcg_temp_free_i32(fp0
);
6332 check_cp1_registers(ctx
, fd
);
6334 TCGv_i32 fp32
= tcg_temp_new_i32();
6335 TCGv_i64 fp64
= tcg_temp_new_i64();
6337 gen_load_fpr32(fp32
, fs
);
6338 gen_helper_float_cvtd_s(fp64
, fp32
);
6339 tcg_temp_free_i32(fp32
);
6340 gen_store_fpr64(ctx
, fp64
, fd
);
6341 tcg_temp_free_i64(fp64
);
6347 TCGv_i32 fp0
= tcg_temp_new_i32();
6349 gen_load_fpr32(fp0
, fs
);
6350 gen_helper_float_cvtw_s(fp0
, fp0
);
6351 gen_store_fpr32(fp0
, fd
);
6352 tcg_temp_free_i32(fp0
);
6357 check_cp1_64bitmode(ctx
);
6359 TCGv_i32 fp32
= tcg_temp_new_i32();
6360 TCGv_i64 fp64
= tcg_temp_new_i64();
6362 gen_load_fpr32(fp32
, fs
);
6363 gen_helper_float_cvtl_s(fp64
, fp32
);
6364 tcg_temp_free_i32(fp32
);
6365 gen_store_fpr64(ctx
, fp64
, fd
);
6366 tcg_temp_free_i64(fp64
);
6371 check_cp1_64bitmode(ctx
);
6373 TCGv_i64 fp64
= tcg_temp_new_i64();
6374 TCGv_i32 fp32_0
= tcg_temp_new_i32();
6375 TCGv_i32 fp32_1
= tcg_temp_new_i32();
6377 gen_load_fpr32(fp32_0
, fs
);
6378 gen_load_fpr32(fp32_1
, ft
);
6379 tcg_gen_concat_i32_i64(fp64
, fp32_0
, fp32_1
);
6380 tcg_temp_free_i32(fp32_1
);
6381 tcg_temp_free_i32(fp32_0
);
6382 gen_store_fpr64(ctx
, fp64
, fd
);
6383 tcg_temp_free_i64(fp64
);
6404 TCGv_i32 fp0
= tcg_temp_new_i32();
6405 TCGv_i32 fp1
= tcg_temp_new_i32();
6407 gen_load_fpr32(fp0
, fs
);
6408 gen_load_fpr32(fp1
, ft
);
6409 if (ctx
->opcode
& (1 << 6)) {
6411 gen_cmpabs_s(func
-48, fp0
, fp1
, cc
);
6412 opn
= condnames_abs
[func
-48];
6414 gen_cmp_s(func
-48, fp0
, fp1
, cc
);
6415 opn
= condnames
[func
-48];
6417 tcg_temp_free_i32(fp0
);
6418 tcg_temp_free_i32(fp1
);
6422 check_cp1_registers(ctx
, fs
| ft
| fd
);
6424 TCGv_i64 fp0
= tcg_temp_new_i64();
6425 TCGv_i64 fp1
= tcg_temp_new_i64();
6427 gen_load_fpr64(ctx
, fp0
, fs
);
6428 gen_load_fpr64(ctx
, fp1
, ft
);
6429 gen_helper_float_add_d(fp0
, fp0
, fp1
);
6430 tcg_temp_free_i64(fp1
);
6431 gen_store_fpr64(ctx
, fp0
, fd
);
6432 tcg_temp_free_i64(fp0
);
6438 check_cp1_registers(ctx
, fs
| ft
| fd
);
6440 TCGv_i64 fp0
= tcg_temp_new_i64();
6441 TCGv_i64 fp1
= tcg_temp_new_i64();
6443 gen_load_fpr64(ctx
, fp0
, fs
);
6444 gen_load_fpr64(ctx
, fp1
, ft
);
6445 gen_helper_float_sub_d(fp0
, fp0
, fp1
);
6446 tcg_temp_free_i64(fp1
);
6447 gen_store_fpr64(ctx
, fp0
, fd
);
6448 tcg_temp_free_i64(fp0
);
6454 check_cp1_registers(ctx
, fs
| ft
| fd
);
6456 TCGv_i64 fp0
= tcg_temp_new_i64();
6457 TCGv_i64 fp1
= tcg_temp_new_i64();
6459 gen_load_fpr64(ctx
, fp0
, fs
);
6460 gen_load_fpr64(ctx
, fp1
, ft
);
6461 gen_helper_float_mul_d(fp0
, fp0
, fp1
);
6462 tcg_temp_free_i64(fp1
);
6463 gen_store_fpr64(ctx
, fp0
, fd
);
6464 tcg_temp_free_i64(fp0
);
6470 check_cp1_registers(ctx
, fs
| ft
| fd
);
6472 TCGv_i64 fp0
= tcg_temp_new_i64();
6473 TCGv_i64 fp1
= tcg_temp_new_i64();
6475 gen_load_fpr64(ctx
, fp0
, fs
);
6476 gen_load_fpr64(ctx
, fp1
, ft
);
6477 gen_helper_float_div_d(fp0
, fp0
, fp1
);
6478 tcg_temp_free_i64(fp1
);
6479 gen_store_fpr64(ctx
, fp0
, fd
);
6480 tcg_temp_free_i64(fp0
);
6486 check_cp1_registers(ctx
, fs
| fd
);
6488 TCGv_i64 fp0
= tcg_temp_new_i64();
6490 gen_load_fpr64(ctx
, fp0
, fs
);
6491 gen_helper_float_sqrt_d(fp0
, fp0
);
6492 gen_store_fpr64(ctx
, fp0
, fd
);
6493 tcg_temp_free_i64(fp0
);
6498 check_cp1_registers(ctx
, fs
| fd
);
6500 TCGv_i64 fp0
= tcg_temp_new_i64();
6502 gen_load_fpr64(ctx
, fp0
, fs
);
6503 gen_helper_float_abs_d(fp0
, fp0
);
6504 gen_store_fpr64(ctx
, fp0
, fd
);
6505 tcg_temp_free_i64(fp0
);
6510 check_cp1_registers(ctx
, fs
| fd
);
6512 TCGv_i64 fp0
= tcg_temp_new_i64();
6514 gen_load_fpr64(ctx
, fp0
, fs
);
6515 gen_store_fpr64(ctx
, fp0
, fd
);
6516 tcg_temp_free_i64(fp0
);
6521 check_cp1_registers(ctx
, fs
| fd
);
6523 TCGv_i64 fp0
= tcg_temp_new_i64();
6525 gen_load_fpr64(ctx
, fp0
, fs
);
6526 gen_helper_float_chs_d(fp0
, fp0
);
6527 gen_store_fpr64(ctx
, fp0
, fd
);
6528 tcg_temp_free_i64(fp0
);
6533 check_cp1_64bitmode(ctx
);
6535 TCGv_i64 fp0
= tcg_temp_new_i64();
6537 gen_load_fpr64(ctx
, fp0
, fs
);
6538 gen_helper_float_roundl_d(fp0
, fp0
);
6539 gen_store_fpr64(ctx
, fp0
, fd
);
6540 tcg_temp_free_i64(fp0
);
6545 check_cp1_64bitmode(ctx
);
6547 TCGv_i64 fp0
= tcg_temp_new_i64();
6549 gen_load_fpr64(ctx
, fp0
, fs
);
6550 gen_helper_float_truncl_d(fp0
, fp0
);
6551 gen_store_fpr64(ctx
, fp0
, fd
);
6552 tcg_temp_free_i64(fp0
);
6557 check_cp1_64bitmode(ctx
);
6559 TCGv_i64 fp0
= tcg_temp_new_i64();
6561 gen_load_fpr64(ctx
, fp0
, fs
);
6562 gen_helper_float_ceill_d(fp0
, fp0
);
6563 gen_store_fpr64(ctx
, fp0
, fd
);
6564 tcg_temp_free_i64(fp0
);
6569 check_cp1_64bitmode(ctx
);
6571 TCGv_i64 fp0
= tcg_temp_new_i64();
6573 gen_load_fpr64(ctx
, fp0
, fs
);
6574 gen_helper_float_floorl_d(fp0
, fp0
);
6575 gen_store_fpr64(ctx
, fp0
, fd
);
6576 tcg_temp_free_i64(fp0
);
6581 check_cp1_registers(ctx
, fs
);
6583 TCGv_i32 fp32
= tcg_temp_new_i32();
6584 TCGv_i64 fp64
= tcg_temp_new_i64();
6586 gen_load_fpr64(ctx
, fp64
, fs
);
6587 gen_helper_float_roundw_d(fp32
, fp64
);
6588 tcg_temp_free_i64(fp64
);
6589 gen_store_fpr32(fp32
, fd
);
6590 tcg_temp_free_i32(fp32
);
6595 check_cp1_registers(ctx
, fs
);
6597 TCGv_i32 fp32
= tcg_temp_new_i32();
6598 TCGv_i64 fp64
= tcg_temp_new_i64();
6600 gen_load_fpr64(ctx
, fp64
, fs
);
6601 gen_helper_float_truncw_d(fp32
, fp64
);
6602 tcg_temp_free_i64(fp64
);
6603 gen_store_fpr32(fp32
, fd
);
6604 tcg_temp_free_i32(fp32
);
6609 check_cp1_registers(ctx
, fs
);
6611 TCGv_i32 fp32
= tcg_temp_new_i32();
6612 TCGv_i64 fp64
= tcg_temp_new_i64();
6614 gen_load_fpr64(ctx
, fp64
, fs
);
6615 gen_helper_float_ceilw_d(fp32
, fp64
);
6616 tcg_temp_free_i64(fp64
);
6617 gen_store_fpr32(fp32
, fd
);
6618 tcg_temp_free_i32(fp32
);
6623 check_cp1_registers(ctx
, fs
);
6625 TCGv_i32 fp32
= tcg_temp_new_i32();
6626 TCGv_i64 fp64
= tcg_temp_new_i64();
6628 gen_load_fpr64(ctx
, fp64
, fs
);
6629 gen_helper_float_floorw_d(fp32
, fp64
);
6630 tcg_temp_free_i64(fp64
);
6631 gen_store_fpr32(fp32
, fd
);
6632 tcg_temp_free_i32(fp32
);
6637 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
    int l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv_i64 fp0 = tcg_temp_local_new_i64();

    gen_load_gpr(t0, ft);
    tcg_gen_brcondi_tl(TCG_COND_NE, t0, 0, l1);
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    int l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv_i64 fp0 = tcg_temp_local_new_i64();

    gen_load_gpr(t0, ft);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_recip_d(fp0, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_rsqrt_d(fp0, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_helper_float_recip2_d(fp0, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_recip1_d(fp0, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_rsqrt1_d(fp0, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_helper_float_rsqrt2_d(fp0, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    if (ctx->opcode & (1 << 6)) {
        check_cp1_registers(ctx, fs | ft);
        gen_cmpabs_d(func-48, fp0, fp1, cc);
        opn = condnames_abs[func-48];
        check_cp1_registers(ctx, fs | ft);
        gen_cmp_d(func-48, fp0, fp1, cc);
        opn = condnames[func-48];
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);

    check_cp1_registers(ctx, fs);
    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp64, fs);
    gen_helper_float_cvts_d(fp32, fp64);
    tcg_temp_free_i64(fp64);
    gen_store_fpr32(fp32, fd);
    tcg_temp_free_i32(fp32);

    check_cp1_registers(ctx, fs);
    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp64, fs);
    gen_helper_float_cvtw_d(fp32, fp64);
    tcg_temp_free_i64(fp64);
    gen_store_fpr32(fp32, fd);
    tcg_temp_free_i32(fp32);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_cvtl_d(fp0, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    TCGv_i32 fp0 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_helper_float_cvts_w(fp0, fp0);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    check_cp1_registers(ctx, fd);
    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();

    gen_load_fpr32(fp32, fs);
    gen_helper_float_cvtd_w(fp64, fp32);
    tcg_temp_free_i32(fp32);
    gen_store_fpr64(ctx, fp64, fd);
    tcg_temp_free_i64(fp64);

    check_cp1_64bitmode(ctx);
    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp64, fs);
    gen_helper_float_cvts_l(fp32, fp64);
    tcg_temp_free_i64(fp64);
    gen_store_fpr32(fp32, fd);
    tcg_temp_free_i32(fp32);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_cvtd_l(fp0, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_cvtps_pw(fp0, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_helper_float_add_ps(fp0, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_helper_float_sub_ps(fp0, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_helper_float_mul_ps(fp0, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_abs_ps(fp0, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_chs_ps(fp0, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    gen_movcf_ps(fs, fd, (ft >> 2) & 0x7, ft & 0x1);
    check_cp1_64bitmode(ctx);
    int l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv_i32 fp0 = tcg_temp_local_new_i32();
    TCGv_i32 fph0 = tcg_temp_local_new_i32();

    gen_load_gpr(t0, ft);
    tcg_gen_brcondi_tl(TCG_COND_NE, t0, 0, l1);
    gen_load_fpr32(fp0, fs);
    gen_load_fpr32h(fph0, fs);
    gen_store_fpr32(fp0, fd);
    gen_store_fpr32h(fph0, fd);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fph0);

    check_cp1_64bitmode(ctx);
    int l1 = gen_new_label();
    TCGv t0 = tcg_temp_new();
    TCGv_i32 fp0 = tcg_temp_local_new_i32();
    TCGv_i32 fph0 = tcg_temp_local_new_i32();

    gen_load_gpr(t0, ft);
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);
    gen_load_fpr32(fp0, fs);
    gen_load_fpr32h(fph0, fs);
    gen_store_fpr32(fp0, fd);
    gen_store_fpr32h(fph0, fd);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fph0);
    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, ft);
    gen_load_fpr64(ctx, fp1, fs);
    gen_helper_float_addr_ps(fp0, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, ft);
    gen_load_fpr64(ctx, fp1, fs);
    gen_helper_float_mulr_ps(fp0, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, fd);
    gen_helper_float_recip2_ps(fp0, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_recip1_ps(fp0, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_rsqrt1_ps(fp0, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_helper_float_rsqrt2_ps(fp0, fp0, fp1);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    check_cp1_64bitmode(ctx);
    TCGv_i32 fp0 = tcg_temp_new_i32();

    gen_load_fpr32h(fp0, fs);
    gen_helper_float_cvts_pu(fp0, fp0);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_helper_float_cvtpw_ps(fp0, fp0);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    TCGv_i32 fp0 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_helper_float_cvts_pl(fp0, fp0);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    check_cp1_64bitmode(ctx);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_load_fpr32(fp1, ft);
    gen_store_fpr32h(fp0, fd);
    gen_store_fpr32(fp1, fd);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);

    check_cp1_64bitmode(ctx);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_load_fpr32h(fp1, ft);
    gen_store_fpr32(fp1, fd);
    gen_store_fpr32h(fp0, fd);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);

    check_cp1_64bitmode(ctx);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();

    gen_load_fpr32h(fp0, fs);
    gen_load_fpr32(fp1, ft);
    gen_store_fpr32(fp1, fd);
    gen_store_fpr32h(fp0, fd);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);

    check_cp1_64bitmode(ctx);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();

    gen_load_fpr32h(fp0, fs);
    gen_load_fpr32h(fp1, ft);
    gen_store_fpr32(fp1, fd);
    gen_store_fpr32h(fp0, fd);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    if (ctx->opcode & (1 << 6)) {
        gen_cmpabs_ps(func-48, fp0, fp1, cc);
        opn = condnames_abs[func-48];
        gen_cmp_ps(func-48, fp0, fp1, cc);
        opn = condnames[func-48];
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);

    generate_exception (ctx, EXCP_RI);

    MIPS_DEBUG("%s %s, %s, %s", opn, fregnames[fd], fregnames[fs], fregnames[ft]);

    MIPS_DEBUG("%s %s,%s", opn, fregnames[fs], fregnames[ft]);

    MIPS_DEBUG("%s %s,%s", opn, fregnames[fd], fregnames[fs]);
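    /* Every gen_farith case above follows the same translation pattern: load
       the source FPR(s) into fresh TCG temporaries, call the matching
       softfloat helper (or plain moves for the MOV and pair-select cases),
       write the result back with gen_store_fpr32/gen_store_fpr64 and free the
       temporaries.  check_cp1_registers() validates the register numbers for
       the current FPU register model, while check_cp1_64bitmode() guards
       formats that need a 64-bit FPU (L and PS). */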
/* Coprocessor 3 (FPU) */
static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
                           int fd, int fs, int base, int index)
    const char *opn = "extended float load/store";
    TCGv t0 = tcg_temp_local_new();
    TCGv t1 = tcg_temp_local_new();

    gen_load_gpr(t0, index);
    } else if (index == 0) {
        gen_load_gpr(t0, base);
        gen_load_gpr(t0, base);
        gen_load_gpr(t1, index);
        gen_op_addr_add(ctx, t0, t1);
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */
    TCGv_i32 fp0 = tcg_temp_new_i32();

    tcg_gen_qemu_ld32s(t1, t0, ctx->mem_idx);
    tcg_gen_trunc_tl_i32(fp0, t1);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    check_cp1_registers(ctx, fd);
    TCGv_i64 fp0 = tcg_temp_new_i64();

    tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    tcg_gen_andi_tl(t0, t0, ~0x7);
    TCGv_i64 fp0 = tcg_temp_new_i64();

    tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

    TCGv_i32 fp0 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    tcg_gen_extu_i32_tl(t1, fp0);
    tcg_gen_qemu_st32(t1, t0, ctx->mem_idx);
    tcg_temp_free_i32(fp0);

    check_cp1_registers(ctx, fs);
    TCGv_i64 fp0 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
    tcg_temp_free_i64(fp0);

    check_cp1_64bitmode(ctx);
    tcg_gen_andi_tl(t0, t0, ~0x7);
    TCGv_i64 fp0 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
    tcg_temp_free_i64(fp0);

    generate_exception(ctx, EXCP_RI);

    MIPS_DEBUG("%s %s, %s(%s)", opn, fregnames[store ? fs : fd],
               regnames[index], regnames[base]);
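/* gen_flt3_arith below covers the COP1X three-operand FPU group: ALNV.PS plus
   the multiply-add family (MADD/MSUB/NMADD/NMSUB in the S, D and PS formats),
   each ending in a call to the corresponding gen_helper_float_* routine. */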
static void gen_flt3_arith (DisasContext *ctx, uint32_t opc,
                            int fd, int fr, int fs, int ft)
    const char *opn = "flt3_arith";

    check_cp1_64bitmode(ctx);
    TCGv t0 = tcg_temp_local_new();
    TCGv_i32 fp0 = tcg_temp_local_new_i32();
    TCGv_i32 fph0 = tcg_temp_local_new_i32();
    TCGv_i32 fp1 = tcg_temp_local_new_i32();
    TCGv_i32 fph1 = tcg_temp_local_new_i32();
    int l1 = gen_new_label();
    int l2 = gen_new_label();

    gen_load_gpr(t0, fr);
    tcg_gen_andi_tl(t0, t0, 0x7);
    gen_load_fpr32(fp0, fs);
    gen_load_fpr32h(fph0, fs);
    gen_load_fpr32(fp1, ft);
    gen_load_fpr32h(fph1, ft);

    tcg_gen_brcondi_tl(TCG_COND_NE, t0, 0, l1);
    gen_store_fpr32(fp0, fd);
    gen_store_fpr32h(fph0, fd);

    tcg_gen_brcondi_tl(TCG_COND_NE, t0, 4, l2);
#ifdef TARGET_WORDS_BIGENDIAN
    gen_store_fpr32(fph1, fd);
    gen_store_fpr32h(fp0, fd);
    gen_store_fpr32(fph0, fd);
    gen_store_fpr32h(fp1, fd);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fph0);
    tcg_temp_free_i32(fp1);
    tcg_temp_free_i32(fph1);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_load_fpr32(fp1, ft);
    gen_load_fpr32(fp2, fr);
    gen_helper_float_muladd_s(fp2, fp0, fp1, fp2);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(fp2, fd);
    tcg_temp_free_i32(fp2);

    check_cp1_registers(ctx, fd | fs | ft | fr);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_muladd_d(fp2, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_muladd_ps(fp2, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_load_fpr32(fp1, ft);
    gen_load_fpr32(fp2, fr);
    gen_helper_float_mulsub_s(fp2, fp0, fp1, fp2);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(fp2, fd);
    tcg_temp_free_i32(fp2);

    check_cp1_registers(ctx, fd | fs | ft | fr);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_mulsub_d(fp2, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_mulsub_ps(fp2, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_load_fpr32(fp1, ft);
    gen_load_fpr32(fp2, fr);
    gen_helper_float_nmuladd_s(fp2, fp0, fp1, fp2);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(fp2, fd);
    tcg_temp_free_i32(fp2);

    check_cp1_registers(ctx, fd | fs | ft | fr);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_nmuladd_d(fp2, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_nmuladd_ps(fp2, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_load_fpr32(fp1, ft);
    gen_load_fpr32(fp2, fr);
    gen_helper_float_nmulsub_s(fp2, fp0, fp1, fp2);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(fp2, fd);
    tcg_temp_free_i32(fp2);

    check_cp1_registers(ctx, fd | fs | ft | fr);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_nmulsub_d(fp2, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);

    check_cp1_64bitmode(ctx);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();

    gen_load_fpr64(ctx, fp0, fs);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fr);
    gen_helper_float_nmulsub_ps(fp2, fp0, fp1, fp2);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(fp1);
    gen_store_fpr64(ctx, fp2, fd);
    tcg_temp_free_i64(fp2);
    generate_exception (ctx, EXCP_RI);

    MIPS_DEBUG("%s %s, %s, %s, %s", opn, fregnames[fd], fregnames[fr],
               fregnames[fs], fregnames[ft]);

/* ISA extensions (ASEs) */
/* MIPS16 extension to MIPS32 */
/* SmartMIPS extension to MIPS32 */
#if defined(TARGET_MIPS64)
/* MDMX extension to MIPS64 */
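/* Hand-worked decode example for illustration: the instruction word
   0x01094020 ("add $8, $8, $9") has its top six bits clear, so
   MASK_OP_MAJOR() selects OPC_SPECIAL; the field extraction in decode_opc()
   then yields rs = 8, rt = 9, rd = 8, sa = 0, and MASK_SPECIAL() picks the
   ADD handler from the function field 0x20. */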
static void decode_opc (CPUState *env, DisasContext *ctx)
    uint32_t op, op1, op2;

    /* make sure instructions are on a word boundary */
    if (ctx->pc & 0x3) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception(ctx, EXCP_AdEL);

    /* Handle blikely not taken case */
    if ((ctx->hflags & MIPS_HFLAG_BMASK) == MIPS_HFLAG_BL) {
        int l1 = gen_new_label();

        MIPS_DEBUG("blikely condition (" TARGET_FMT_lx ")", ctx->pc + 4);
        tcg_gen_brcondi_i32(TCG_COND_NE, bcond, 0, l1);
        TCGv_i32 r_tmp = tcg_temp_new_i32();

        tcg_gen_movi_i32(r_tmp, ctx->hflags & ~MIPS_HFLAG_BMASK);
        tcg_gen_st_i32(r_tmp, cpu_env, offsetof(CPUState, hflags));
        tcg_temp_free_i32(r_tmp);
        gen_goto_tb(ctx, 1, ctx->pc + 4);

    op = MASK_OP_MAJOR(ctx->opcode);
    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode;
    op1 = MASK_SPECIAL(ctx->opcode);
    case OPC_SLL: /* Arithmetic with immediate */
    case OPC_SRL ... OPC_SRA:
        gen_arith_imm(env, ctx, op1, rd, rt, sa);
    case OPC_MOVZ ... OPC_MOVN:
        check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
    case OPC_SLLV: /* Arithmetic */
    case OPC_SRLV ... OPC_SRAV:
    case OPC_ADD ... OPC_NOR:
    case OPC_SLT ... OPC_SLTU:
        gen_arith(env, ctx, op1, rd, rs, rt);
    case OPC_MULT ... OPC_DIVU:
        check_insn(env, ctx, INSN_VR54XX);
        op1 = MASK_MUL_VR54XX(ctx->opcode);
        gen_mul_vr54xx(ctx, op1, rd, rs, rt);
        gen_muldiv(ctx, op1, rs, rt);
    case OPC_JR ... OPC_JALR:
        gen_compute_branch(ctx, op1, rs, rd, sa);
    case OPC_TGE ... OPC_TEQ: /* Traps */
        gen_trap(ctx, op1, rs, rt, -1);
    case OPC_MFHI: /* Move from HI/LO */
        gen_HILO(ctx, op1, rd);
    case OPC_MTLO: /* Move to HI/LO */
        gen_HILO(ctx, op1, rs);
    case OPC_PMON: /* Pmon entry point, also R4010 selsl */
#ifdef MIPS_STRICT_STANDARD
        MIPS_INVAL("PMON / selsl");
        generate_exception(ctx, EXCP_RI);
        gen_helper_0i(pmon, sa);
        generate_exception(ctx, EXCP_SYSCALL);
        generate_exception(ctx, EXCP_BREAK);
#ifdef MIPS_STRICT_STANDARD
        generate_exception(ctx, EXCP_RI);
        /* Implemented as RI exception for now. */
        MIPS_INVAL("spim (unofficial)");
        generate_exception(ctx, EXCP_RI);
        check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            save_cpu_state(ctx, 1);
            check_cp1_enabled(ctx);
            gen_movci(ctx, rd, rs, (ctx->opcode >> 18) & 0x7,
                      (ctx->opcode >> 16) & 1);
            generate_exception_err(ctx, EXCP_CpU, 1);
#if defined(TARGET_MIPS64)
    /* MIPS64 specific opcodes */
    case OPC_DSRL ... OPC_DSRA:
    case OPC_DSRL32 ... OPC_DSRA32:
        check_insn(env, ctx, ISA_MIPS3);
        gen_arith_imm(env, ctx, op1, rd, rt, sa);
    case OPC_DSRLV ... OPC_DSRAV:
    case OPC_DADD ... OPC_DSUBU:
        check_insn(env, ctx, ISA_MIPS3);
        gen_arith(env, ctx, op1, rd, rs, rt);
    case OPC_DMULT ... OPC_DDIVU:
        check_insn(env, ctx, ISA_MIPS3);
        gen_muldiv(ctx, op1, rs, rt);
    default: /* Invalid */
        MIPS_INVAL("special");
        generate_exception(ctx, EXCP_RI);
    op1 = MASK_SPECIAL2(ctx->opcode);
    case OPC_MADD ... OPC_MADDU: /* Multiply and add/sub */
    case OPC_MSUB ... OPC_MSUBU:
        check_insn(env, ctx, ISA_MIPS32);
        gen_muldiv(ctx, op1, rs, rt);
        gen_arith(env, ctx, op1, rd, rs, rt);
    case OPC_CLZ ... OPC_CLO:
        check_insn(env, ctx, ISA_MIPS32);
        gen_cl(ctx, op1, rd, rs);
        /* XXX: not clear which exception should be raised
         *      when in debug mode... */
        check_insn(env, ctx, ISA_MIPS32);
        if (!(ctx->hflags & MIPS_HFLAG_DM)) {
            generate_exception(ctx, EXCP_DBp);
            generate_exception(ctx, EXCP_DBp);
#if defined(TARGET_MIPS64)
    case OPC_DCLZ ... OPC_DCLO:
        check_insn(env, ctx, ISA_MIPS64);
        gen_cl(ctx, op1, rd, rs);
    default: /* Invalid */
        MIPS_INVAL("special2");
        generate_exception(ctx, EXCP_RI);
    op1 = MASK_SPECIAL3(ctx->opcode);
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_bitops(ctx, op1, rt, rs, sa, rd);
        check_insn(env, ctx, ISA_MIPS32R2);
        op2 = MASK_BSHFL(ctx->opcode);
        gen_bshfl(ctx, op2, rt, rd);
        check_insn(env, ctx, ISA_MIPS32R2);
        TCGv t0 = tcg_temp_local_new();

        save_cpu_state(ctx, 1);
        gen_helper_rdhwr_cpunum(t0);
        save_cpu_state(ctx, 1);
        gen_helper_rdhwr_synci_step(t0);
        save_cpu_state(ctx, 1);
        gen_helper_rdhwr_cc(t0);
        save_cpu_state(ctx, 1);
        gen_helper_rdhwr_ccres(t0);
        if (env->user_mode_only) {
            tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, tls_value));
        /* XXX: Some CPUs implement this in hardware.
           Not supported yet. */
    default: /* Invalid */
        MIPS_INVAL("rdhwr");
        generate_exception(ctx, EXCP_RI);
        gen_store_gpr(t0, rt);

        check_insn(env, ctx, ASE_MT);
        TCGv t0 = tcg_temp_local_new();
        TCGv t1 = tcg_temp_local_new();

        gen_load_gpr(t0, rt);
        gen_load_gpr(t1, rs);
        gen_helper_fork(t0, t1);

        check_insn(env, ctx, ASE_MT);
        TCGv t0 = tcg_temp_local_new();

        gen_load_gpr(t0, rs);
        gen_helper_yield(t0, t0);
        gen_store_gpr(t0, rd);
#if defined(TARGET_MIPS64)
    case OPC_DEXTM ... OPC_DEXT:
    case OPC_DINSM ... OPC_DINS:
        check_insn(env, ctx, ISA_MIPS64R2);
        gen_bitops(ctx, op1, rt, rs, sa, rd);
        check_insn(env, ctx, ISA_MIPS64R2);
        op2 = MASK_DBSHFL(ctx->opcode);
        gen_bshfl(ctx, op2, rt, rd);
    default: /* Invalid */
        MIPS_INVAL("special3");
        generate_exception(ctx, EXCP_RI);
    op1 = MASK_REGIMM(ctx->opcode);
    case OPC_BLTZ ... OPC_BGEZL: /* REGIMM branches */
    case OPC_BLTZAL ... OPC_BGEZALL:
        gen_compute_branch(ctx, op1, rs, -1, imm << 2);
    case OPC_TGEI ... OPC_TEQI: /* REGIMM traps */
        gen_trap(ctx, op1, rs, -1, imm);
        check_insn(env, ctx, ISA_MIPS32R2);
    default: /* Invalid */
        MIPS_INVAL("regimm");
        generate_exception(ctx, EXCP_RI);
    check_cp0_enabled(ctx);
    op1 = MASK_CP0(ctx->opcode);
#if defined(TARGET_MIPS64)
#ifndef CONFIG_USER_ONLY
        if (!env->user_mode_only)
            gen_cp0(env, ctx, op1, rt, rd);
#endif /* !CONFIG_USER_ONLY */
    case OPC_C0_FIRST ... OPC_C0_LAST:
#ifndef CONFIG_USER_ONLY
        if (!env->user_mode_only)
            gen_cp0(env, ctx, MASK_C0(ctx->opcode), rt, rd);
#endif /* !CONFIG_USER_ONLY */
#ifndef CONFIG_USER_ONLY
        if (!env->user_mode_only) {
            TCGv t0 = tcg_temp_local_new();

            op2 = MASK_MFMC0(ctx->opcode);
                check_insn(env, ctx, ASE_MT);
                gen_helper_dmt(t0, t0);
                check_insn(env, ctx, ASE_MT);
                gen_helper_emt(t0, t0);
                check_insn(env, ctx, ASE_MT);
                gen_helper_dvpe(t0, t0);
                check_insn(env, ctx, ASE_MT);
                gen_helper_evpe(t0, t0);
                check_insn(env, ctx, ISA_MIPS32R2);
                save_cpu_state(ctx, 1);
                /* Stop translation as we may have switched the execution mode */
                ctx->bstate = BS_STOP;
                check_insn(env, ctx, ISA_MIPS32R2);
                save_cpu_state(ctx, 1);
                /* Stop translation as we may have switched the execution mode */
                ctx->bstate = BS_STOP;
            default: /* Invalid */
                MIPS_INVAL("mfmc0");
                generate_exception(ctx, EXCP_RI);
            gen_store_gpr(t0, rt);
#endif /* !CONFIG_USER_ONLY */
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_load_srsgpr(rt, rd);
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_store_srsgpr(rt, rd);
        generate_exception(ctx, EXCP_RI);
    case OPC_ADDI ... OPC_LUI: /* Arithmetic with immediate opcode */
        gen_arith_imm(env, ctx, op, rt, rs, imm);
    case OPC_J ... OPC_JAL: /* Jump */
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, op, rs, rt, offset);
    case OPC_BEQ ... OPC_BGTZ: /* Branch */
    case OPC_BEQL ... OPC_BGTZL:
        gen_compute_branch(ctx, op, rs, rt, imm << 2);
    case OPC_LB ... OPC_LWR: /* Load and stores */
    case OPC_SB ... OPC_SW:
        gen_ldst(ctx, op, rt, rs, imm);
        check_insn(env, ctx, ISA_MIPS3 | ISA_MIPS32);
        check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);

    /* Floating point (COP1). */
        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            save_cpu_state(ctx, 1);
            check_cp1_enabled(ctx);
            gen_flt_ldst(ctx, op, rt, rs, imm);
            generate_exception_err(ctx, EXCP_CpU, 1);
        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            save_cpu_state(ctx, 1);
            check_cp1_enabled(ctx);
            op1 = MASK_CP1(ctx->opcode);
                check_insn(env, ctx, ISA_MIPS32R2);
                gen_cp1(ctx, op1, rt, rd);
#if defined(TARGET_MIPS64)
                check_insn(env, ctx, ISA_MIPS3);
                gen_cp1(ctx, op1, rt, rd);
                check_insn(env, ctx, ASE_MIPS3D);
                gen_compute_branch1(env, ctx, MASK_BC1(ctx->opcode),
                                    (rt >> 2) & 0x7, imm << 2);
                gen_farith(ctx, MASK_CP1_FUNC(ctx->opcode), rt, rd, sa,
                generate_exception (ctx, EXCP_RI);
            generate_exception_err(ctx, EXCP_CpU, 1);

    /* COP2: Not implemented. */
        generate_exception_err(ctx, EXCP_CpU, 2);

        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            save_cpu_state(ctx, 1);
            check_cp1_enabled(ctx);
            op1 = MASK_CP3(ctx->opcode);
                gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
                gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
                generate_exception (ctx, EXCP_RI);
            generate_exception_err(ctx, EXCP_CpU, 1);
#if defined(TARGET_MIPS64)
    /* MIPS64 opcodes */
    case OPC_LDL ... OPC_LDR:
    case OPC_SDL ... OPC_SDR:
        check_insn(env, ctx, ISA_MIPS3);
        gen_ldst(ctx, op, rt, rs, imm);
    case OPC_DADDI ... OPC_DADDIU:
        check_insn(env, ctx, ISA_MIPS3);
        gen_arith_imm(env, ctx, op, rt, rs, imm);
        check_insn(env, ctx, ASE_MIPS16);
        /* MIPS16: Not implemented. */
        check_insn(env, ctx, ASE_MDMX);
        /* MDMX: Not implemented. */
    default: /* Invalid */
        MIPS_INVAL("major opcode");
        generate_exception(ctx, EXCP_RI);
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branches completion */
        ctx->hflags &= ~MIPS_HFLAG_BMASK;
        ctx->bstate = BS_BRANCH;
        save_cpu_state(ctx, 0);
        /* FIXME: Need to clear can_do_io. */
            /* unconditional branch */
            MIPS_DEBUG("unconditional branch");
            gen_goto_tb(ctx, 0, ctx->btarget);
            /* blikely taken case */
            MIPS_DEBUG("blikely branch taken");
            gen_goto_tb(ctx, 0, ctx->btarget);
            /* Conditional branch */
            MIPS_DEBUG("conditional branch");
            int l1 = gen_new_label();

            tcg_gen_brcondi_i32(TCG_COND_NE, bcond, 0, l1);
            gen_goto_tb(ctx, 1, ctx->pc + 4);
            gen_goto_tb(ctx, 0, ctx->btarget);
            /* unconditional branch to register */
            MIPS_DEBUG("branch to register");
            tcg_gen_mov_tl(cpu_PC, btarget);
            MIPS_DEBUG("unknown branch");
gen_intermediate_code_internal (CPUState *env, TranslationBlock *tb,
    target_ulong pc_start;
    uint16_t *gen_opc_end;

    if (search_pc && loglevel)
        fprintf (logfile, "search pc %d\n", search_pc);

    /* Leave some spare opc slots for branch handling. */
    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE - 16;
    ctx.bstate = BS_NONE;
    /* Restore delay slot state from the tb context. */
    ctx.hflags = (uint32_t)tb->flags; /* FIXME: maybe use 64 bits here? */
    restore_cpu_state(env, &ctx);
    if (env->user_mode_only)
        ctx.mem_idx = MIPS_HFLAG_UM;
        ctx.mem_idx = ctx.hflags & MIPS_HFLAG_KSU;
    max_insns = tb->cflags & CF_COUNT_MASK;
        max_insns = CF_COUNT_MASK;
    if (loglevel & CPU_LOG_TB_CPU) {
        fprintf(logfile, "------------------------------------------------\n");
        /* FIXME: This may print out stale hflags from env... */
        cpu_dump_state(env, logfile, fprintf, 0);
#ifdef MIPS_DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_IN_ASM)
        fprintf(logfile, "\ntb %p idx %d hflags %04x\n",
                tb, ctx.mem_idx, ctx.hflags);
    while (ctx.bstate == BS_NONE) {
        if (unlikely(!TAILQ_EMPTY(&env->breakpoints))) {
            TAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.pc) {
                    save_cpu_state(&ctx, 1);
                    ctx.bstate = BS_BRANCH;
                    gen_helper_0i(raise_exception, EXCP_DEBUG);
                    /* Include the breakpoint location or the tb won't
                     * be flushed when it must be. */
                    goto done_generating;

            j = gen_opc_ptr - gen_opc_buf;
                gen_opc_instr_start[lj++] = 0;
            gen_opc_pc[lj] = ctx.pc;
            gen_opc_hflags[lj] = ctx.hflags & MIPS_HFLAG_BMASK;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))
        ctx.opcode = ldl_code(ctx.pc);
        decode_opc(env, &ctx);

        if (env->singlestep_enabled)
        if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
        if (gen_opc_ptr >= gen_opc_end)
        if (num_insns >= max_insns)
#if defined (MIPS_SINGLE_STEP)
    if (tb->cflags & CF_LAST_IO)
    if (env->singlestep_enabled) {
        save_cpu_state(&ctx, ctx.bstate == BS_NONE);
        gen_helper_0i(raise_exception, EXCP_DEBUG);
    switch (ctx.bstate) {
        gen_helper_interrupt_restart();
        gen_goto_tb(&ctx, 0, ctx.pc);
        save_cpu_state(&ctx, 0);
        gen_goto_tb(&ctx, 0, ctx.pc);
        gen_helper_interrupt_restart();
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
        j = gen_opc_ptr - gen_opc_buf;
            gen_opc_instr_start[lj++] = 0;
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;
#if defined MIPS_DEBUG_DISAS
    if (loglevel & CPU_LOG_TB_IN_ASM)
        fprintf(logfile, "\n");
    if (loglevel & CPU_LOG_TB_IN_ASM) {
        fprintf(logfile, "IN: %s\n", lookup_symbol(pc_start));
        target_disas(logfile, pc_start, ctx.pc - pc_start, 0);
        fprintf(logfile, "\n");
    if (loglevel & CPU_LOG_TB_CPU) {
        fprintf(logfile, "---------------- %d %08x\n", ctx.bstate, ctx.hflags);
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)
    gen_intermediate_code_internal(env, tb, 0);

void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)
    gen_intermediate_code_internal(env, tb, 1);
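/* The two entry points above differ only in the final search_pc argument:
   gen_intermediate_code_pc() appears to be the variant used when the
   per-instruction gen_opc_pc[]/gen_opc_hflags[] tables recorded in the loop
   above are needed to map a location in generated code back to a guest pc
   (compare gen_pc_load() at the end of this file). */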
static void fpu_dump_state(CPUState *env, FILE *f,
                           int (*fpu_fprintf)(FILE *f, const char *fmt, ...),
    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);

#define printfpr(fp)                                                    \
        fpu_fprintf(f, "w:%08x d:%016lx fd:%13g fs:%13g psu: %13g\n",   \
                    (fp)->w[FP_ENDIAN_IDX], (fp)->d, (fp)->fd,          \
                    (fp)->fs[FP_ENDIAN_IDX], (fp)->fs[!FP_ENDIAN_IDX]); \
        tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX];                  \
        tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX];           \
        fpu_fprintf(f, "w:%08x d:%016lx fd:%13g fs:%13g psu:%13g\n",    \
                    tmp.w[FP_ENDIAN_IDX], tmp.d, tmp.fd,                \
                    tmp.fs[FP_ENDIAN_IDX], tmp.fs[!FP_ENDIAN_IDX]);     \

    fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%08x(0x%02x)\n",
                env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                env->active_fpu.fp_status,
                get_float_exception_flags(&env->active_fpu.fp_status));
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        fpu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);
#if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
/* Debug help: The architecture requires 32bit code to maintain proper
   sign-extended values on 64bit machines. */

#define SIGN_EXT_P(val) ((((val) & ~0x7fffffff) == 0) || (((val) & ~0x7fffffff) == ~0x7fffffff))

cpu_mips_check_sign_extensions (CPUState *env, FILE *f,
                                int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
    if (!SIGN_EXT_P(env->active_tc.PC))
        cpu_fprintf(f, "BROKEN: pc=0x" TARGET_FMT_lx "\n", env->active_tc.PC);
    if (!SIGN_EXT_P(env->active_tc.HI[0]))
        cpu_fprintf(f, "BROKEN: HI=0x" TARGET_FMT_lx "\n", env->active_tc.HI[0]);
    if (!SIGN_EXT_P(env->active_tc.LO[0]))
        cpu_fprintf(f, "BROKEN: LO=0x" TARGET_FMT_lx "\n", env->active_tc.LO[0]);
    if (!SIGN_EXT_P(env->btarget))
        cpu_fprintf(f, "BROKEN: btarget=0x" TARGET_FMT_lx "\n", env->btarget);

    for (i = 0; i < 32; i++) {
        if (!SIGN_EXT_P(env->active_tc.gpr[i]))
            cpu_fprintf(f, "BROKEN: %s=0x" TARGET_FMT_lx "\n", regnames[i], env->active_tc.gpr[i]);

    if (!SIGN_EXT_P(env->CP0_EPC))
        cpu_fprintf(f, "BROKEN: EPC=0x" TARGET_FMT_lx "\n", env->CP0_EPC);
    if (!SIGN_EXT_P(env->CP0_LLAddr))
        cpu_fprintf(f, "BROKEN: LLAddr=0x" TARGET_FMT_lx "\n", env->CP0_LLAddr);
void cpu_dump_state (CPUState *env, FILE *f,
                     int (*cpu_fprintf)(FILE *f, const char *fmt, ...),
    cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                " LO=0x" TARGET_FMT_lx " ds %04x " TARGET_FMT_lx " %d\n",
                env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                env->hflags, env->btarget, env->bcond);
    for (i = 0; i < 32; i++) {
        cpu_fprintf(f, "GPR%02d:", i);
        cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
        cpu_fprintf(f, "\n");

    cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
                env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    cpu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x" TARGET_FMT_lx "\n",
                env->CP0_Config0, env->CP0_Config1, env->CP0_LLAddr);
    if (env->hflags & MIPS_HFLAG_FPU)
        fpu_dump_state(env, f, cpu_fprintf, flags);
#if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
    cpu_mips_check_sign_extensions(env, f, cpu_fprintf, flags);
static void mips_tcg_init(void)
    /* Initialize various static tables. */
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    for (i = 0; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
                                        offsetof(CPUState, active_tc.gpr[i]),
    cpu_PC = tcg_global_mem_new(TCG_AREG0,
                                offsetof(CPUState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(TCG_AREG0,
                                       offsetof(CPUState, active_tc.HI[i]),
        cpu_LO[i] = tcg_global_mem_new(TCG_AREG0,
                                       offsetof(CPUState, active_tc.LO[i]),
        cpu_ACX[i] = tcg_global_mem_new(TCG_AREG0,
                                        offsetof(CPUState, active_tc.ACX[i]),
    cpu_dspctrl = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, active_tc.DSPControl),
    bcond = tcg_global_mem_new_i32(TCG_AREG0,
                                   offsetof(CPUState, bcond), "bcond");
    btarget = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, btarget), "btarget");
    for (i = 0; i < 32; i++)
        fpu_fpr32[i] = tcg_global_mem_new_i32(TCG_AREG0,
            offsetof(CPUState, active_fpu.fpr[i].w[FP_ENDIAN_IDX]),
    for (i = 0; i < 32; i++)
        fpu_fpr64[i] = tcg_global_mem_new_i64(TCG_AREG0,
            offsetof(CPUState, active_fpu.fpr[i]),
    for (i = 0; i < 32; i++)
        fpu_fpr32h[i] = tcg_global_mem_new_i32(TCG_AREG0,
            offsetof(CPUState, active_fpu.fpr[i].w[!FP_ENDIAN_IDX]),
    fpu_fcr0 = tcg_global_mem_new_i32(TCG_AREG0,
                                      offsetof(CPUState, active_fpu.fcr0),
    fpu_fcr31 = tcg_global_mem_new_i32(TCG_AREG0,
                                       offsetof(CPUState, active_fpu.fcr31),

    /* register helpers */
#define GEN_HELPER 2

#include "translate_init.c"
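/* mips_tcg_init() above maps each guest register-file slot (GPRs, PC, the
   HI/LO/ACX accumulators, DSPControl, the FPU data and control registers,
   plus the branch condition and target) onto a TCG global backed by its
   offsetof() location in CPUState, so generated code can access them
   directly. */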
CPUMIPSState *cpu_mips_init (const char *cpu_model)
    const mips_def_t *def;

    def = cpu_mips_find_by_name(cpu_model);
    env = qemu_mallocz(sizeof(CPUMIPSState));
    env->cpu_model = def;
    env->cpu_model_str = cpu_model;
void cpu_reset (CPUMIPSState *env)
    memset(env, 0, offsetof(CPUMIPSState, breakpoints));

#if defined(CONFIG_USER_ONLY)
    env->user_mode_only = 1;
    if (env->user_mode_only) {
        env->hflags = MIPS_HFLAG_UM;
        if (env->hflags & MIPS_HFLAG_BMASK) {
            /* If the exception was raised from a delay slot,
               come back to the jump. */
            env->CP0_ErrorEPC = env->active_tc.PC - 4;
            env->CP0_ErrorEPC = env->active_tc.PC;
        env->active_tc.PC = (int32_t)0xBFC00000;
        /* SMP not implemented */
        env->CP0_EBase = 0x80000000;
        env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
        /* vectored interrupts not implemented, timer on int 7,
           no performance counters. */
        env->CP0_IntCtl = 0xe0000000;
        for (i = 0; i < 7; i++) {
            env->CP0_WatchLo[i] = 0;
            env->CP0_WatchHi[i] = 0x80000000;
        env->CP0_WatchLo[7] = 0;
        env->CP0_WatchHi[7] = 0;
        /* Count register increments in debug mode, EJTAG version 1 */
        env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);
        env->hflags = MIPS_HFLAG_CP0;
    env->exception_index = EXCP_NONE;
    cpu_mips_register(env, env->cpu_model);
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)
    env->active_tc.PC = gen_opc_pc[pc_pos];
    env->hflags &= ~MIPS_HFLAG_BMASK;
    env->hflags |= gen_opc_hflags[pc_pos];