/*
 *  MIPS32 emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2004-2005 Jocelyn Mayer
 *  Copyright (c) 2006 Marius Groeger (FPU operations)
 *  Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 *  Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu-common.h"

//#define MIPS_DEBUG_DISAS
//#define MIPS_DEBUG_SIGN_EXTENSIONS
/* MIPS major opcodes */
#define MASK_OP_MAJOR(op)  (op & (0x3F << 26))

enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP1      = (0x11 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_JAL      = (0x03 << 26),
    OPC_JALS     = OPC_JAL | 0x5,
    OPC_BEQ      = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),  /* MIPS 16 only */
    OPC_JALXS    = OPC_JALX | 0x5,
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* Reserved major opcode */
    OPC_MAJOR3B_RESERVED = (0x3B << 26),
};
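
/*
 * Illustrative sketch (not part of the original sources): how a fetched
 * 32-bit instruction word is classified with the mask above.  The function
 * name example_major_opcode() is hypothetical; the real decoding is done
 * later in this file.
 */
static inline uint32_t example_major_opcode(uint32_t insn)
{
    /* MASK_OP_MAJOR keeps bits 31..26, i.e. the primary opcode field, so
       the result can be compared directly against OPC_SPECIAL, OPC_ADDIU,
       OPC_LW, ... from the enum above. */
    return MASK_OP_MAJOR(insn);
}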
/* MIPS special opcodes */
#define MASK_SPECIAL(op)   MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_SLL      = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0   */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3 */
    OPC_SRL      = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_ROTR     = OPC_SRL | (1 << 21),
    OPC_SRA      = 0x03 | OPC_SPECIAL,
    OPC_SLLV     = 0x04 | OPC_SPECIAL,
    OPC_SRLV     = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_ROTRV    = OPC_SRLV | (1 << 6),
    OPC_SRAV     = 0x07 | OPC_SPECIAL,
    OPC_DSLLV    = 0x14 | OPC_SPECIAL,
    OPC_DSRLV    = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DROTRV   = OPC_DSRLV | (1 << 6),
    OPC_DSRAV    = 0x17 | OPC_SPECIAL,
    OPC_DSLL     = 0x38 | OPC_SPECIAL,
    OPC_DSRL     = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DROTR    = OPC_DSRL | (1 << 21),
    OPC_DSRA     = 0x3B | OPC_SPECIAL,
    OPC_DSLL32   = 0x3C | OPC_SPECIAL,
    OPC_DSRL32   = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DROTR32  = OPC_DSRL32 | (1 << 21),
    OPC_DSRA32   = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT     = 0x18 | OPC_SPECIAL,
    OPC_MULTU    = 0x19 | OPC_SPECIAL,
    OPC_DIV      = 0x1A | OPC_SPECIAL,
    OPC_DIVU     = 0x1B | OPC_SPECIAL,
    OPC_DMULT    = 0x1C | OPC_SPECIAL,
    OPC_DMULTU   = 0x1D | OPC_SPECIAL,
    OPC_DDIV     = 0x1E | OPC_SPECIAL,
    OPC_DDIVU    = 0x1F | OPC_SPECIAL,
    /* 2 registers arithmetic / logic */
    OPC_ADD      = 0x20 | OPC_SPECIAL,
    OPC_ADDU     = 0x21 | OPC_SPECIAL,
    OPC_SUB      = 0x22 | OPC_SPECIAL,
    OPC_SUBU     = 0x23 | OPC_SPECIAL,
    OPC_AND      = 0x24 | OPC_SPECIAL,
    OPC_OR       = 0x25 | OPC_SPECIAL,
    OPC_XOR      = 0x26 | OPC_SPECIAL,
    OPC_NOR      = 0x27 | OPC_SPECIAL,
    OPC_SLT      = 0x2A | OPC_SPECIAL,
    OPC_SLTU     = 0x2B | OPC_SPECIAL,
    OPC_DADD     = 0x2C | OPC_SPECIAL,
    OPC_DADDU    = 0x2D | OPC_SPECIAL,
    OPC_DSUB     = 0x2E | OPC_SPECIAL,
    OPC_DSUBU    = 0x2F | OPC_SPECIAL,
    OPC_JR       = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR     = 0x09 | OPC_SPECIAL, /* Also JALR.HB */
    OPC_JALRC    = OPC_JALR | (0x5 << 6),
    OPC_JALRS    = 0x10 | OPC_SPECIAL | (0x5 << 6),
    OPC_TGE      = 0x30 | OPC_SPECIAL,
    OPC_TGEU     = 0x31 | OPC_SPECIAL,
    OPC_TLT      = 0x32 | OPC_SPECIAL,
    OPC_TLTU     = 0x33 | OPC_SPECIAL,
    OPC_TEQ      = 0x34 | OPC_SPECIAL,
    OPC_TNE      = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI     = 0x10 | OPC_SPECIAL,
    OPC_MTHI     = 0x11 | OPC_SPECIAL,
    OPC_MFLO     = 0x12 | OPC_SPECIAL,
    OPC_MTLO     = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ     = 0x0A | OPC_SPECIAL,
    OPC_MOVN     = 0x0B | OPC_SPECIAL,
    OPC_MOVCI    = 0x01 | OPC_SPECIAL,
    OPC_PMON     = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL  = 0x0C | OPC_SPECIAL,
    OPC_BREAK    = 0x0D | OPC_SPECIAL,
    OPC_SPIM     = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC     = 0x0F | OPC_SPECIAL,
    OPC_SPECIAL15_RESERVED = 0x15 | OPC_SPECIAL,
    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL35_RESERVED = 0x35 | OPC_SPECIAL,
    OPC_SPECIAL37_RESERVED = 0x37 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
};
/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op)   MASK_SPECIAL(op) | (op & (0x1F << 6))

enum {
    OPC_VR54XX_MULS    = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU   = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC    = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU   = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC    = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU   = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI   = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU  = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI  = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI  = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI  = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
};
/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op)    MASK_OP_MAJOR(op) | (op & (0x1F << 16))

enum {
    OPC_BLTZ     = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL    = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ     = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL    = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL   = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALS  = OPC_BLTZAL | 0x5, /* microMIPS */
    OPC_BLTZALL  = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL   = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALS  = OPC_BGEZAL | 0x5, /* microMIPS */
    OPC_BGEZALL  = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI     = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU    = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI     = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU    = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI     = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI     = (0x0E << 16) | OPC_REGIMM,
    OPC_SYNCI    = (0x1F << 16) | OPC_REGIMM,
};
/* Special2 opcodes */
#define MASK_SPECIAL2(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    /* Multiply & xxx operations */
    OPC_MADD     = 0x00 | OPC_SPECIAL2,
    OPC_MADDU    = 0x01 | OPC_SPECIAL2,
    OPC_MUL      = 0x02 | OPC_SPECIAL2,
    OPC_MSUB     = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU    = 0x05 | OPC_SPECIAL2,
    OPC_CLZ      = 0x20 | OPC_SPECIAL2,
    OPC_CLO      = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ     = 0x24 | OPC_SPECIAL2,
    OPC_DCLO     = 0x25 | OPC_SPECIAL2,
    OPC_SDBBP    = 0x3F | OPC_SPECIAL2,
};
/* Special3 opcodes */
#define MASK_SPECIAL3(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_EXT      = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM    = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU    = 0x02 | OPC_SPECIAL3,
    OPC_DEXT     = 0x03 | OPC_SPECIAL3,
    OPC_INS      = 0x04 | OPC_SPECIAL3,
    OPC_DINSM    = 0x05 | OPC_SPECIAL3,
    OPC_DINSU    = 0x06 | OPC_SPECIAL3,
    OPC_DINS     = 0x07 | OPC_SPECIAL3,
    OPC_FORK     = 0x08 | OPC_SPECIAL3,
    OPC_YIELD    = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL    = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL   = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR    = 0x3B | OPC_SPECIAL3,
};
#define MASK_BSHFL(op)     MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_WSBH     = (0x02 << 6) | OPC_BSHFL,
    OPC_SEB      = (0x10 << 6) | OPC_BSHFL,
    OPC_SEH      = (0x18 << 6) | OPC_BSHFL,
};
#define MASK_DBSHFL(op)    MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_DSBH     = (0x02 << 6) | OPC_DBSHFL,
    OPC_DSHD     = (0x05 << 6) | OPC_DBSHFL,
};
/* Coprocessor 0 (rs field) */
#define MASK_CP0(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC0     = (0x00 << 21) | OPC_CP0,
    OPC_DMFC0    = (0x01 << 21) | OPC_CP0,
    OPC_MTC0     = (0x04 << 21) | OPC_CP0,
    OPC_DMTC0    = (0x05 << 21) | OPC_CP0,
    OPC_MFTR     = (0x08 << 21) | OPC_CP0,
    OPC_RDPGPR   = (0x0A << 21) | OPC_CP0,
    OPC_MFMC0    = (0x0B << 21) | OPC_CP0,
    OPC_MTTR     = (0x0C << 21) | OPC_CP0,
    OPC_WRPGPR   = (0x0E << 21) | OPC_CP0,
    OPC_C0       = (0x10 << 21) | OPC_CP0,
    OPC_C0_FIRST = (0x10 << 21) | OPC_CP0,
    OPC_C0_LAST  = (0x1F << 21) | OPC_CP0,
};
#define MASK_MFMC0(op)     MASK_CP0(op) | (op & 0xFFFF)

enum {
    OPC_DMT      = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_EMT      = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_DVPE     = 0x01 | (0 << 5) | OPC_MFMC0,
    OPC_EVPE     = 0x01 | (1 << 5) | OPC_MFMC0,
    OPC_DI       = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_EI       = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
};
/* Coprocessor 0 (with rs == C0) */
#define MASK_C0(op)        MASK_CP0(op) | (op & 0x3F)

enum {
    OPC_TLBR     = 0x01 | OPC_C0,
    OPC_TLBWI    = 0x02 | OPC_C0,
    OPC_TLBWR    = 0x06 | OPC_C0,
    OPC_TLBP     = 0x08 | OPC_C0,
    OPC_RFE      = 0x10 | OPC_C0,
    OPC_ERET     = 0x18 | OPC_C0,
    OPC_DERET    = 0x1F | OPC_C0,
    OPC_WAIT     = 0x20 | OPC_C0,
};
/* Coprocessor 1 (rs field) */
#define MASK_CP1(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S  = 16,  /* single fp */
    FMT_D  = 17,  /* double fp */
    FMT_E  = 18,  /* extended fp */
    FMT_Q  = 19,  /* quad fp */
    FMT_W  = 20,  /* 32-bit fixed */
    FMT_L  = 21,  /* 64-bit fixed */
    FMT_PS = 22,  /* paired single fp */
    /* 23 - 31 are reserved */
};
enum {
    OPC_MFC1     = (0x00 << 21) | OPC_CP1,
    OPC_DMFC1    = (0x01 << 21) | OPC_CP1,
    OPC_CFC1     = (0x02 << 21) | OPC_CP1,
    OPC_MFHC1    = (0x03 << 21) | OPC_CP1,
    OPC_MTC1     = (0x04 << 21) | OPC_CP1,
    OPC_DMTC1    = (0x05 << 21) | OPC_CP1,
    OPC_CTC1     = (0x06 << 21) | OPC_CP1,
    OPC_MTHC1    = (0x07 << 21) | OPC_CP1,
    OPC_BC1      = (0x08 << 21) | OPC_CP1, /* bc */
    OPC_BC1ANY2  = (0x09 << 21) | OPC_CP1,
    OPC_BC1ANY4  = (0x0A << 21) | OPC_CP1,
    OPC_S_FMT    = (FMT_S << 21) | OPC_CP1,
    OPC_D_FMT    = (FMT_D << 21) | OPC_CP1,
    OPC_E_FMT    = (FMT_E << 21) | OPC_CP1,
    OPC_Q_FMT    = (FMT_Q << 21) | OPC_CP1,
    OPC_W_FMT    = (FMT_W << 21) | OPC_CP1,
    OPC_L_FMT    = (FMT_L << 21) | OPC_CP1,
    OPC_PS_FMT   = (FMT_PS << 21) | OPC_CP1,
};
#define MASK_CP1_FUNC(op)  MASK_CP1(op) | (op & 0x3F)
#define MASK_BC1(op)       MASK_CP1(op) | (op & (0x3 << 16))

enum {
    OPC_BC1F     = (0x00 << 16) | OPC_BC1,
    OPC_BC1T     = (0x01 << 16) | OPC_BC1,
    OPC_BC1FL    = (0x02 << 16) | OPC_BC1,
    OPC_BC1TL    = (0x03 << 16) | OPC_BC1,
};

enum {
    OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
    OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
};

enum {
    OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
    OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
};
#define MASK_CP2(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC2     = (0x00 << 21) | OPC_CP2,
    OPC_DMFC2    = (0x01 << 21) | OPC_CP2,
    OPC_CFC2     = (0x02 << 21) | OPC_CP2,
    OPC_MFHC2    = (0x03 << 21) | OPC_CP2,
    OPC_MTC2     = (0x04 << 21) | OPC_CP2,
    OPC_DMTC2    = (0x05 << 21) | OPC_CP2,
    OPC_CTC2     = (0x06 << 21) | OPC_CP2,
    OPC_MTHC2    = (0x07 << 21) | OPC_CP2,
    OPC_BC2      = (0x08 << 21) | OPC_CP2,
};
#define MASK_CP3(op)       MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_LWXC1    = 0x00 | OPC_CP3,
    OPC_LDXC1    = 0x01 | OPC_CP3,
    OPC_LUXC1    = 0x05 | OPC_CP3,
    OPC_SWXC1    = 0x08 | OPC_CP3,
    OPC_SDXC1    = 0x09 | OPC_CP3,
    OPC_SUXC1    = 0x0D | OPC_CP3,
    OPC_PREFX    = 0x0F | OPC_CP3,
    OPC_ALNV_PS  = 0x1E | OPC_CP3,
    OPC_MADD_S   = 0x20 | OPC_CP3,
    OPC_MADD_D   = 0x21 | OPC_CP3,
    OPC_MADD_PS  = 0x26 | OPC_CP3,
    OPC_MSUB_S   = 0x28 | OPC_CP3,
    OPC_MSUB_D   = 0x29 | OPC_CP3,
    OPC_MSUB_PS  = 0x2E | OPC_CP3,
    OPC_NMADD_S  = 0x30 | OPC_CP3,
    OPC_NMADD_D  = 0x31 | OPC_CP3,
    OPC_NMADD_PS = 0x36 | OPC_CP3,
    OPC_NMSUB_S  = 0x38 | OPC_CP3,
    OPC_NMSUB_D  = 0x39 | OPC_CP3,
    OPC_NMSUB_PS = 0x3E | OPC_CP3,
};
/* global register indices */
static TCGv_ptr cpu_env;
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC], cpu_ACX[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget, bcond;
static TCGv_i32 hflags;
static TCGv_i32 fpu_fcr0, fpu_fcr31;

static uint32_t gen_opc_hflags[OPC_BUF_SIZE];
#include "gen-icount.h"

#define gen_helper_0i(name, arg) do {                             \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(helper_tmp);                                \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_1i(name, arg1, arg2) do {                      \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(arg1, helper_tmp);                          \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_2i(name, arg1, arg2, arg3) do {                \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(arg1, arg2, helper_tmp);                    \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)

#define gen_helper_3i(name, arg1, arg2, arg3, arg4) do {          \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(arg1, arg2, arg3, helper_tmp);              \
    tcg_temp_free_i32(helper_tmp);                                \
    } while(0)
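
/*
 * Sketch (not from the original sources) of how the wrappers above are used:
 * the immediate argument is materialised as a TCGv_i32 constant, handed to
 * the generated helper stub, and freed right away.  The function name below
 * is hypothetical; generate_exception() later in this file does the same
 * thing via gen_helper_0i(raise_exception, excp).
 */
static inline void example_gen_raise(int excp)
{
    gen_helper_0i(raise_exception, excp);
    /* expands to roughly:
     *   do {
     *       TCGv_i32 helper_tmp = tcg_const_i32(excp);
     *       gen_helper_raise_exception(helper_tmp);
     *       tcg_temp_free_i32(helper_tmp);
     *   } while (0);
     */
}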
typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc, saved_pc;
    uint32_t opcode;
    int singlestep_enabled;
    /* Routine used to access memory */
    int mem_idx;
    uint32_t hflags, saved_hflags;
    target_ulong btarget;
} DisasContext;

enum {
    BS_NONE   = 0, /* We go out of the TB without reaching a branch or an
                    * exception condition */
    BS_STOP   = 1, /* We want to stop translation for any reason */
    BS_BRANCH = 2, /* We reached a branch condition */
    BS_EXCP   = 3, /* We reached an exception condition */
};
static const char *regnames[] =
    { "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
      "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
      "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
      "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra", };

static const char *regnames_HI[] =
    { "HI0", "HI1", "HI2", "HI3", };

static const char *regnames_LO[] =
    { "LO0", "LO1", "LO2", "LO3", };

static const char *regnames_ACX[] =
    { "ACX0", "ACX1", "ACX2", "ACX3", };

static const char *fregnames[] =
    { "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
      "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
      "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
      "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31", };
#ifdef MIPS_DEBUG_DISAS
#define MIPS_DEBUG(fmt, ...)                            \
        qemu_log_mask(CPU_LOG_TB_IN_ASM,                \
                      TARGET_FMT_lx ": %08x " fmt "\n", \
                      ctx->pc, ctx->opcode , ## __VA_ARGS__)
#define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#define MIPS_DEBUG(fmt, ...) do { } while(0)
#define LOG_DISAS(...) do { } while (0)
#endif

#define MIPS_INVAL(op)                                                 \
do {                                                                   \
    MIPS_DEBUG("Invalid %s %03x %03x %03x", op, ctx->opcode >> 26,     \
               ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F));      \
} while (0)
/* General purpose registers moves. */
static inline void gen_load_gpr (TCGv t, int reg)
{
    if (reg == 0)
        tcg_gen_movi_tl(t, 0);
    else
        tcg_gen_mov_tl(t, cpu_gpr[reg]);
}

static inline void gen_store_gpr (TCGv t, int reg)
{
    if (reg != 0)
        tcg_gen_mov_tl(cpu_gpr[reg], t);
}

/* Moves to/from ACX register. */
static inline void gen_load_ACX (TCGv t, int reg)
{
    tcg_gen_mov_tl(t, cpu_ACX[reg]);
}

static inline void gen_store_ACX (TCGv t, int reg)
{
    tcg_gen_mov_tl(cpu_ACX[reg], t);
}
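
/*
 * Illustrative sketch (not from the original file): a typical pattern built
 * on the helpers above.  Register $0 is hardwired to zero, which is why
 * gen_load_gpr() materialises an immediate 0 and gen_store_gpr() is a no-op
 * for reg == 0.  The function name below is hypothetical.
 */
static inline void example_gen_addu(int rd, int rs, int rt)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, rs);        /* read rs (constant 0 if rs is $zero) */
    gen_load_gpr(t1, rt);        /* read rt */
    tcg_gen_add_tl(t0, t0, t1);  /* target-long add; the real ADDU path also
                                    sign-extends with tcg_gen_ext32s_tl */
    gen_store_gpr(t0, rd);       /* discarded if rd is $zero */

    tcg_temp_free(t0);
    tcg_temp_free(t1);
}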
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
{
    TCGv t0 = tcg_temp_new();

    if (from == 0)
        tcg_gen_movi_tl(t0, 0);
    else {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
    }
    gen_store_gpr(t0, to);
    tcg_temp_free(t0);
}

static inline void gen_store_srsgpr (int from, int to)
{
    if (to != 0) {
        TCGv t0 = tcg_temp_new();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(t0, from);
        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
        tcg_temp_free(t0);
    }
}
/* Floating point register moves. */
static inline void gen_load_fpr32 (TCGv_i32 t, int reg)
{
    tcg_gen_ld_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[FP_ENDIAN_IDX]));
}

static inline void gen_store_fpr32 (TCGv_i32 t, int reg)
{
    tcg_gen_st_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[FP_ENDIAN_IDX]));
}

static inline void gen_load_fpr32h (TCGv_i32 t, int reg)
{
    tcg_gen_ld_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[!FP_ENDIAN_IDX]));
}

static inline void gen_store_fpr32h (TCGv_i32 t, int reg)
{
    tcg_gen_st_i32(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].w[!FP_ENDIAN_IDX]));
}

static inline void gen_load_fpr64 (DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_ld_i64(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].d));
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        gen_load_fpr32(t0, reg & ~1);
        gen_load_fpr32(t1, reg | 1);
        tcg_gen_concat_i32_i64(t, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}

static inline void gen_store_fpr64 (DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_st_i64(t, cpu_env, offsetof(CPUState, active_fpu.fpr[reg].d));
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(t1, t);
        gen_store_fpr32(t1, reg & ~1);
        tcg_gen_shri_i64(t0, t, 32);
        tcg_gen_trunc_i64_i32(t1, t0);
        gen_store_fpr32(t1, reg | 1);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i64(t0);
    }
}
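
/*
 * Note (added for clarity): when the FR bit is clear (no MIPS_HFLAG_F64),
 * a 64-bit FPR value is split across the even/odd pair of 32-bit registers,
 * which is what the else branches above implement with reg & ~1 and reg | 1.
 * A minimal sketch of a 64-bit FPR-to-FPR move built on these helpers
 * (hypothetical function name):
 */
static inline void example_gen_fpr64_move(DisasContext *ctx, int fd, int fs)
{
    TCGv_i64 t = tcg_temp_new_i64();

    gen_load_fpr64(ctx, t, fs);   /* one 64-bit load, or two 32-bit halves */
    gen_store_fpr64(ctx, t, fd);  /* symmetric on the store side */
    tcg_temp_free_i64(t);
}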
static inline int get_fp_bit (int cc)
{
    if (cc)
        return 24 + cc;
    else
        return 23;
}

static inline void gen_save_pc(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_PC, pc);
}

static inline void save_cpu_state (DisasContext *ctx, int do_save_pc)
{
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->pc != ctx->saved_pc) {
        gen_save_pc(ctx->pc);
        ctx->saved_pc = ctx->pc;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        tcg_gen_movi_i32(hflags, ctx->hflags);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_BR:
            break;
        case MIPS_HFLAG_BC:
        case MIPS_HFLAG_BL:
        case MIPS_HFLAG_B:
            tcg_gen_movi_tl(btarget, ctx->btarget);
            break;
        }
    }
}

static inline void restore_cpu_state (CPUState *env, DisasContext *ctx)
{
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        ctx->btarget = env->btarget;
        break;
    }
}
static inline void
generate_exception_err (DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
}

static inline void
generate_exception (DisasContext *ctx, int excp)
{
    save_cpu_state(ctx, 1);
    gen_helper_0i(raise_exception, excp);
}
/* Addresses computation */
static inline void gen_op_addr_add (DisasContext *ctx, TCGv ret, TCGv arg0, TCGv arg1)
{
    tcg_gen_add_tl(ret, arg0, arg1);

#if defined(TARGET_MIPS64)
    /* For compatibility with 32-bit code, a data reference in user mode
       with Status_UX = 0 should be cast to 32 bits and sign extended.
       See the MIPS64 PRA manual, section 4.10. */
    if (((ctx->hflags & MIPS_HFLAG_KSU) == MIPS_HFLAG_UM) &&
        !(ctx->hflags & MIPS_HFLAG_UX)) {
        tcg_gen_ext32s_i64(ret, ret);
    }
#endif
}
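
/*
 * Illustrative example (not from the original sources) of the compatibility
 * rule implemented above: in user mode with Status_UX = 0 an effective
 * address is reduced to a sign-extended 32-bit value, which is exactly what
 * tcg_gen_ext32s_i64() does at translation time.
 */
static inline uint64_t example_compat_address(uint64_t ea)
{
    /* e.g. 0x00000000ffffffff becomes 0xffffffffffffffff */
    return (uint64_t)(int64_t)(int32_t)ea;
}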
static inline void check_cp0_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
        generate_exception_err(ctx, EXCP_CpU, 0);
}

static inline void check_cp1_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
        generate_exception_err(ctx, EXCP_CpU, 1);
}

/* Verify that the processor is running with COP1X instructions enabled.
   This is associated with the nabla symbol in the MIPS32 and MIPS64
   opcode tables.  */
static inline void check_cop1x(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
        generate_exception(ctx, EXCP_RI);
}

/* Verify that the processor is running with 64-bit floating-point
   operations enabled. */
static inline void check_cp1_64bitmode(DisasContext *ctx)
{
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
        generate_exception(ctx, EXCP_RI);
}

/*
 * Verify if floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
 */
static inline void check_cp1_registers(DisasContext *ctx, int regs)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
        generate_exception(ctx, EXCP_RI);
}

/* This code generates a "reserved instruction" exception if the
   CPU does not support the instruction set corresponding to flags. */
static inline void check_insn(CPUState *env, DisasContext *ctx, int flags)
{
    if (unlikely(!(env->insn_flags & flags)))
        generate_exception(ctx, EXCP_RI);
}

/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception(ctx, EXCP_RI);
}
/* Define small wrappers for gen_load_fpr* so that we have a uniform
   calling interface for 32 and 64-bit FPRs.  No sense in changing
   all callers for gen_load_fpr32 when we need the CTX parameter for
   this one use.  */
#define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(x, y)
#define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
830 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
831 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
832 int ft, int fs, int cc) \
834 TCGv_i##bits fp0 = tcg_temp_new_i##bits (); \
835 TCGv_i##bits fp1 = tcg_temp_new_i##bits (); \
838 check_cp1_64bitmode(ctx); \
844 check_cp1_registers(ctx, fs | ft); \
852 gen_ldcmp_fpr##bits (ctx, fp0, fs); \
853 gen_ldcmp_fpr##bits (ctx, fp1, ft); \
855 case 0: gen_helper_2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); break;\
856 case 1: gen_helper_2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); break;\
857 case 2: gen_helper_2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); break;\
858 case 3: gen_helper_2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); break;\
859 case 4: gen_helper_2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); break;\
860 case 5: gen_helper_2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); break;\
861 case 6: gen_helper_2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); break;\
862 case 7: gen_helper_2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); break;\
863 case 8: gen_helper_2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); break;\
864 case 9: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
865 case 10: gen_helper_2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
866 case 11: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
867 case 12: gen_helper_2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); break;\
868 case 13: gen_helper_2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
869 case 14: gen_helper_2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); break;\
870 case 15: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
873 tcg_temp_free_i##bits (fp0); \
874 tcg_temp_free_i##bits (fp1); \
FOP_CONDS(, 0, d, FMT_D, 64)
FOP_CONDS(abs, 1, d, FMT_D, 64)
FOP_CONDS(, 0, s, FMT_S, 32)
FOP_CONDS(abs, 1, s, FMT_S, 32)
FOP_CONDS(, 0, ps, FMT_PS, 64)
FOP_CONDS(abs, 1, ps, FMT_PS, 64)

#undef gen_ldcmp_fpr32
#undef gen_ldcmp_fpr64
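
/*
 * Note (added for clarity): each FOP_CONDS() instantiation above expands
 * into a gen_cmp_<fmt>() / gen_cmpabs_<fmt>() routine that loads both
 * operands with gen_ldcmp_fpr32/64 and dispatches on the 4-bit condition
 * field `n` to one of the sixteen cmp_<fmt>_<cond> helpers
 * (f, un, eq, ueq, olt, ult, ole, ule, sf, ngle, seq, ngl, lt, nge, le, ngt).
 * For example, an illustrative caller of the double-precision variant would
 * look like:
 *
 *     gen_cmp_d(ctx, 4 /@ olt @/, ft, fs, cc);
 *
 * (comment markers escaped with @ above to keep this block a comment).
 */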
887 /* load/store instructions. */
888 #define OP_LD(insn,fname) \
889 static inline void op_ldst_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
891 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
898 #if defined(TARGET_MIPS64)
904 #define OP_ST(insn,fname) \
905 static inline void op_ldst_##insn(TCGv arg1, TCGv arg2, DisasContext *ctx) \
907 tcg_gen_qemu_##fname(arg1, arg2, ctx->mem_idx); \
912 #if defined(TARGET_MIPS64)
917 #ifdef CONFIG_USER_ONLY
918 #define OP_LD_ATOMIC(insn,fname) \
919 static inline void op_ldst_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
921 TCGv t0 = tcg_temp_new(); \
922 tcg_gen_mov_tl(t0, arg1); \
923 tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx); \
924 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, lladdr)); \
925 tcg_gen_st_tl(ret, cpu_env, offsetof(CPUState, llval)); \
929 #define OP_LD_ATOMIC(insn,fname) \
930 static inline void op_ldst_##insn(TCGv ret, TCGv arg1, DisasContext *ctx) \
932 gen_helper_2i(insn, ret, arg1, ctx->mem_idx); \
OP_LD_ATOMIC(ll,ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld,ld64);
#endif
941 #ifdef CONFIG_USER_ONLY
942 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
943 static inline void op_ldst_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
945 TCGv t0 = tcg_temp_new(); \
946 int l1 = gen_new_label(); \
947 int l2 = gen_new_label(); \
949 tcg_gen_andi_tl(t0, arg2, almask); \
950 tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1); \
951 tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUState, CP0_BadVAddr)); \
952 generate_exception(ctx, EXCP_AdES); \
954 tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, lladdr)); \
955 tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2); \
956 tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20)); \
957 tcg_gen_st_tl(t0, cpu_env, offsetof(CPUState, llreg)); \
958 tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUState, llnewval)); \
959 gen_helper_0i(raise_exception, EXCP_SC); \
961 tcg_gen_movi_tl(t0, 0); \
962 gen_store_gpr(t0, rt); \
966 #define OP_ST_ATOMIC(insn,fname,ldname,almask) \
967 static inline void op_ldst_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
969 TCGv t0 = tcg_temp_new(); \
970 gen_helper_3i(insn, t0, arg1, arg2, ctx->mem_idx); \
971 gen_store_gpr(t0, rt); \
OP_ST_ATOMIC(sc,st32,ld32s,0x3);
#if defined(TARGET_MIPS64)
OP_ST_ATOMIC(scd,st64,ld64,0x7);
#endif
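
/*
 * Note (added for clarity): the OP_LD_ATOMIC/OP_ST_ATOMIC expansions above
 * implement LL/SC.  In the user-mode (CONFIG_USER_ONLY) variant the LL side
 * records the address and loaded value in lladdr/llval; the SC side first
 * checks alignment, then compares the store address against lladdr and, on a
 * match, raises EXCP_SC so the store can be completed atomically outside of
 * generated code, otherwise it simply writes 0 to rt.  A rough C model of
 * that SC decision (hypothetical helper, for illustration only):
 */
static inline int example_sc_outcome(uint64_t lladdr, uint64_t addr,
                                     unsigned almask)
{
    if (addr & almask) {
        return -1;          /* misaligned: address error exception */
    }
    if (addr != lladdr) {
        return 0;           /* link broken: rt <- 0, no store */
    }
    return 1;               /* matched: store is performed */
}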
981 static void gen_base_offset_addr (DisasContext
*ctx
, TCGv addr
,
982 int base
, int16_t offset
)
985 tcg_gen_movi_tl(addr
, offset
);
986 } else if (offset
== 0) {
987 gen_load_gpr(addr
, base
);
989 tcg_gen_movi_tl(addr
, offset
);
990 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
994 static target_ulong
pc_relative_pc (DisasContext
*ctx
)
996 target_ulong pc
= ctx
->pc
;
998 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
999 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
1004 pc
&= ~(target_ulong
)3;
1008 /* Load and store */
1009 static void gen_ldst (DisasContext
*ctx
, uint32_t opc
, int rt
,
1010 int base
, int16_t offset
)
1012 const char *opn
= "ldst";
1013 TCGv t0
= tcg_temp_new();
1014 TCGv t1
= tcg_temp_new();
1016 gen_base_offset_addr(ctx
, t0
, base
, offset
);
1017 /* Don't do NOP if destination is zero: we must perform the actual
1020 #if defined(TARGET_MIPS64)
1022 save_cpu_state(ctx
, 0);
1023 op_ldst_lwu(t0
, t0
, ctx
);
1024 gen_store_gpr(t0
, rt
);
1028 save_cpu_state(ctx
, 0);
1029 op_ldst_ld(t0
, t0
, ctx
);
1030 gen_store_gpr(t0
, rt
);
1034 save_cpu_state(ctx
, 0);
1035 op_ldst_lld(t0
, t0
, ctx
);
1036 gen_store_gpr(t0
, rt
);
1040 save_cpu_state(ctx
, 0);
1041 gen_load_gpr(t1
, rt
);
1042 op_ldst_sd(t1
, t0
, ctx
);
1046 save_cpu_state(ctx
, 1);
1047 gen_load_gpr(t1
, rt
);
1048 gen_helper_3i(ldl
, t1
, t1
, t0
, ctx
->mem_idx
);
1049 gen_store_gpr(t1
, rt
);
1053 save_cpu_state(ctx
, 1);
1054 gen_load_gpr(t1
, rt
);
1055 gen_helper_2i(sdl
, t1
, t0
, ctx
->mem_idx
);
1059 save_cpu_state(ctx
, 1);
1060 gen_load_gpr(t1
, rt
);
1061 gen_helper_3i(ldr
, t1
, t1
, t0
, ctx
->mem_idx
);
1062 gen_store_gpr(t1
, rt
);
1066 save_cpu_state(ctx
, 1);
1067 gen_load_gpr(t1
, rt
);
1068 gen_helper_2i(sdr
, t1
, t0
, ctx
->mem_idx
);
1072 save_cpu_state(ctx
, 1);
1073 tcg_gen_movi_tl(t1
, pc_relative_pc(ctx
));
1074 gen_op_addr_add(ctx
, t0
, t0
, t1
);
1075 op_ldst_ld(t0
, t0
, ctx
);
1076 gen_store_gpr(t0
, rt
);
1080 save_cpu_state(ctx
, 1);
1081 tcg_gen_movi_tl(t1
, pc_relative_pc(ctx
));
1082 gen_op_addr_add(ctx
, t0
, t0
, t1
);
1083 op_ldst_lw(t0
, t0
, ctx
);
1084 gen_store_gpr(t0
, rt
);
1087 save_cpu_state(ctx
, 0);
1088 op_ldst_lw(t0
, t0
, ctx
);
1089 gen_store_gpr(t0
, rt
);
1093 save_cpu_state(ctx
, 0);
1094 gen_load_gpr(t1
, rt
);
1095 op_ldst_sw(t1
, t0
, ctx
);
1099 save_cpu_state(ctx
, 0);
1100 op_ldst_lh(t0
, t0
, ctx
);
1101 gen_store_gpr(t0
, rt
);
1105 save_cpu_state(ctx
, 0);
1106 gen_load_gpr(t1
, rt
);
1107 op_ldst_sh(t1
, t0
, ctx
);
1111 save_cpu_state(ctx
, 0);
1112 op_ldst_lhu(t0
, t0
, ctx
);
1113 gen_store_gpr(t0
, rt
);
1117 save_cpu_state(ctx
, 0);
1118 op_ldst_lb(t0
, t0
, ctx
);
1119 gen_store_gpr(t0
, rt
);
1123 save_cpu_state(ctx
, 0);
1124 gen_load_gpr(t1
, rt
);
1125 op_ldst_sb(t1
, t0
, ctx
);
1129 save_cpu_state(ctx
, 0);
1130 op_ldst_lbu(t0
, t0
, ctx
);
1131 gen_store_gpr(t0
, rt
);
1135 save_cpu_state(ctx
, 1);
1136 gen_load_gpr(t1
, rt
);
1137 gen_helper_3i(lwl
, t1
, t1
, t0
, ctx
->mem_idx
);
1138 gen_store_gpr(t1
, rt
);
1142 save_cpu_state(ctx
, 1);
1143 gen_load_gpr(t1
, rt
);
1144 gen_helper_2i(swl
, t1
, t0
, ctx
->mem_idx
);
1148 save_cpu_state(ctx
, 1);
1149 gen_load_gpr(t1
, rt
);
1150 gen_helper_3i(lwr
, t1
, t1
, t0
, ctx
->mem_idx
);
1151 gen_store_gpr(t1
, rt
);
1155 save_cpu_state(ctx
, 1);
1156 gen_load_gpr(t1
, rt
);
1157 gen_helper_2i(swr
, t1
, t0
, ctx
->mem_idx
);
1161 save_cpu_state(ctx
, 1);
1162 op_ldst_ll(t0
, t0
, ctx
);
1163 gen_store_gpr(t0
, rt
);
1167 MIPS_DEBUG("%s %s, %d(%s)", opn
, regnames
[rt
], offset
, regnames
[base
]);
1172 /* Store conditional */
1173 static void gen_st_cond (DisasContext
*ctx
, uint32_t opc
, int rt
,
1174 int base
, int16_t offset
)
1176 const char *opn
= "st_cond";
1179 t0
= tcg_temp_local_new();
1181 gen_base_offset_addr(ctx
, t0
, base
, offset
);
1182 /* Don't do NOP if destination is zero: we must perform the actual
1185 t1
= tcg_temp_local_new();
1186 gen_load_gpr(t1
, rt
);
1188 #if defined(TARGET_MIPS64)
1190 save_cpu_state(ctx
, 0);
1191 op_ldst_scd(t1
, t0
, rt
, ctx
);
1196 save_cpu_state(ctx
, 1);
1197 op_ldst_sc(t1
, t0
, rt
, ctx
);
1201 MIPS_DEBUG("%s %s, %d(%s)", opn
, regnames
[rt
], offset
, regnames
[base
]);
1206 /* Load and store */
1207 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
1208 int base
, int16_t offset
)
1210 const char *opn
= "flt_ldst";
1211 TCGv t0
= tcg_temp_new();
1213 gen_base_offset_addr(ctx
, t0
, base
, offset
);
1214 /* Don't do NOP if destination is zero: we must perform the actual
1219 TCGv_i32 fp0
= tcg_temp_new_i32();
1221 tcg_gen_qemu_ld32s(t0
, t0
, ctx
->mem_idx
);
1222 tcg_gen_trunc_tl_i32(fp0
, t0
);
1223 gen_store_fpr32(fp0
, ft
);
1224 tcg_temp_free_i32(fp0
);
1230 TCGv_i32 fp0
= tcg_temp_new_i32();
1231 TCGv t1
= tcg_temp_new();
1233 gen_load_fpr32(fp0
, ft
);
1234 tcg_gen_extu_i32_tl(t1
, fp0
);
1235 tcg_gen_qemu_st32(t1
, t0
, ctx
->mem_idx
);
1237 tcg_temp_free_i32(fp0
);
1243 TCGv_i64 fp0
= tcg_temp_new_i64();
1245 tcg_gen_qemu_ld64(fp0
, t0
, ctx
->mem_idx
);
1246 gen_store_fpr64(ctx
, fp0
, ft
);
1247 tcg_temp_free_i64(fp0
);
1253 TCGv_i64 fp0
= tcg_temp_new_i64();
1255 gen_load_fpr64(ctx
, fp0
, ft
);
1256 tcg_gen_qemu_st64(fp0
, t0
, ctx
->mem_idx
);
1257 tcg_temp_free_i64(fp0
);
1263 generate_exception(ctx
, EXCP_RI
);
1266 MIPS_DEBUG("%s %s, %d(%s)", opn
, fregnames
[ft
], offset
, regnames
[base
]);
1271 static void gen_cop1_ldst(CPUState
*env
, DisasContext
*ctx
,
1272 uint32_t op
, int rt
, int rs
, int16_t imm
)
1274 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
1275 check_cp1_enabled(ctx
);
1276 gen_flt_ldst(ctx
, op
, rt
, rs
, imm
);
1278 generate_exception_err(ctx
, EXCP_CpU
, 1);
1282 /* Arithmetic with immediate operand */
1283 static void gen_arith_imm (CPUState
*env
, DisasContext
*ctx
, uint32_t opc
,
1284 int rt
, int rs
, int16_t imm
)
1286 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
1287 const char *opn
= "imm arith";
1289 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
1290 /* If no destination, treat it as a NOP.
1291 For addi, we must generate the overflow exception when needed. */
1298 TCGv t0
= tcg_temp_local_new();
1299 TCGv t1
= tcg_temp_new();
1300 TCGv t2
= tcg_temp_new();
1301 int l1
= gen_new_label();
1303 gen_load_gpr(t1
, rs
);
1304 tcg_gen_addi_tl(t0
, t1
, uimm
);
1305 tcg_gen_ext32s_tl(t0
, t0
);
1307 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
1308 tcg_gen_xori_tl(t2
, t0
, uimm
);
1309 tcg_gen_and_tl(t1
, t1
, t2
);
1311 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
1313 /* operands of same sign, result different sign */
1314 generate_exception(ctx
, EXCP_OVERFLOW
);
1316 tcg_gen_ext32s_tl(t0
, t0
);
1317 gen_store_gpr(t0
, rt
);
1324 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
1325 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
1327 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
1331 #if defined(TARGET_MIPS64)
1334 TCGv t0
= tcg_temp_local_new();
1335 TCGv t1
= tcg_temp_new();
1336 TCGv t2
= tcg_temp_new();
1337 int l1
= gen_new_label();
1339 gen_load_gpr(t1
, rs
);
1340 tcg_gen_addi_tl(t0
, t1
, uimm
);
1342 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
1343 tcg_gen_xori_tl(t2
, t0
, uimm
);
1344 tcg_gen_and_tl(t1
, t1
, t2
);
1346 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
1348 /* operands of same sign, result different sign */
1349 generate_exception(ctx
, EXCP_OVERFLOW
);
1351 gen_store_gpr(t0
, rt
);
1358 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
1360 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
    MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
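
/*
 * Note (added for clarity): the ADDI/DADDI paths above detect signed
 * overflow without comparing against the operand values directly: overflow
 * happened iff the operands have the same sign and the result's sign
 * differs, which is what the xori/xori/and sequence computes before the
 * brcondi.  The same test in plain C (illustrative only):
 */
static inline int example_add_overflows(int32_t rs, int32_t imm)
{
    int32_t sum = (int32_t)((uint32_t)rs + (uint32_t)imm);
    /* sign bit set => operands agreed in sign but the sum does not */
    return ((~(rs ^ imm)) & (sum ^ imm)) < 0;
}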
1369 /* Logic with immediate operand */
1370 static void gen_logic_imm (CPUState
*env
, uint32_t opc
, int rt
, int rs
, int16_t imm
)
1373 const char *opn
= "imm logic";
1376 /* If no destination, treat it as a NOP. */
1380 uimm
= (uint16_t)imm
;
1383 if (likely(rs
!= 0))
1384 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
1386 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
1391 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
1393 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
1397 if (likely(rs
!= 0))
1398 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
1400 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
1404 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
1408 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx
, opn
, regnames
[rt
], regnames
[rs
], uimm
);
1411 /* Set on less than with immediate operand */
1412 static void gen_slt_imm (CPUState
*env
, uint32_t opc
, int rt
, int rs
, int16_t imm
)
1414 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
1415 const char *opn
= "imm arith";
1419 /* If no destination, treat it as a NOP. */
1423 t0
= tcg_temp_new();
1424 gen_load_gpr(t0
, rs
);
1427 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
1431 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
1435 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx
, opn
, regnames
[rt
], regnames
[rs
], uimm
);
1439 /* Shifts with immediate operand */
1440 static void gen_shift_imm(CPUState
*env
, DisasContext
*ctx
, uint32_t opc
,
1441 int rt
, int rs
, int16_t imm
)
1443 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
1444 const char *opn
= "imm shift";
1448 /* If no destination, treat it as a NOP. */
1453 t0
= tcg_temp_new();
1454 gen_load_gpr(t0
, rs
);
1457 tcg_gen_shli_tl(t0
, t0
, uimm
);
1458 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
1462 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
1467 tcg_gen_ext32u_tl(t0
, t0
);
1468 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
1470 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
1476 TCGv_i32 t1
= tcg_temp_new_i32();
1478 tcg_gen_trunc_tl_i32(t1
, t0
);
1479 tcg_gen_rotri_i32(t1
, t1
, uimm
);
1480 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
1481 tcg_temp_free_i32(t1
);
1483 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
1487 #if defined(TARGET_MIPS64)
1489 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
1493 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
1497 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
1502 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
1504 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
1509 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
1513 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
1517 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
1521 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
1526 MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx
, opn
, regnames
[rt
], regnames
[rs
], uimm
);
1531 static void gen_arith (CPUState
*env
, DisasContext
*ctx
, uint32_t opc
,
1532 int rd
, int rs
, int rt
)
1534 const char *opn
= "arith";
1536 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
1537 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
1538 /* If no destination, treat it as a NOP.
1539 For add & sub, we must generate the overflow exception when needed. */
1547 TCGv t0
= tcg_temp_local_new();
1548 TCGv t1
= tcg_temp_new();
1549 TCGv t2
= tcg_temp_new();
1550 int l1
= gen_new_label();
1552 gen_load_gpr(t1
, rs
);
1553 gen_load_gpr(t2
, rt
);
1554 tcg_gen_add_tl(t0
, t1
, t2
);
1555 tcg_gen_ext32s_tl(t0
, t0
);
1556 tcg_gen_xor_tl(t1
, t1
, t2
);
1557 tcg_gen_xor_tl(t2
, t0
, t2
);
1558 tcg_gen_andc_tl(t1
, t2
, t1
);
1560 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
1562 /* operands of same sign, result different sign */
1563 generate_exception(ctx
, EXCP_OVERFLOW
);
1565 gen_store_gpr(t0
, rd
);
1571 if (rs
!= 0 && rt
!= 0) {
1572 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1573 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
1574 } else if (rs
== 0 && rt
!= 0) {
1575 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
1576 } else if (rs
!= 0 && rt
== 0) {
1577 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1579 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1585 TCGv t0
= tcg_temp_local_new();
1586 TCGv t1
= tcg_temp_new();
1587 TCGv t2
= tcg_temp_new();
1588 int l1
= gen_new_label();
1590 gen_load_gpr(t1
, rs
);
1591 gen_load_gpr(t2
, rt
);
1592 tcg_gen_sub_tl(t0
, t1
, t2
);
1593 tcg_gen_ext32s_tl(t0
, t0
);
1594 tcg_gen_xor_tl(t2
, t1
, t2
);
1595 tcg_gen_xor_tl(t1
, t0
, t1
);
1596 tcg_gen_and_tl(t1
, t1
, t2
);
1598 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
1600 /* operands of different sign, first operand and result different sign */
1601 generate_exception(ctx
, EXCP_OVERFLOW
);
1603 gen_store_gpr(t0
, rd
);
1609 if (rs
!= 0 && rt
!= 0) {
1610 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1611 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
1612 } else if (rs
== 0 && rt
!= 0) {
1613 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
1614 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
1615 } else if (rs
!= 0 && rt
== 0) {
1616 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1618 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1622 #if defined(TARGET_MIPS64)
1625 TCGv t0
= tcg_temp_local_new();
1626 TCGv t1
= tcg_temp_new();
1627 TCGv t2
= tcg_temp_new();
1628 int l1
= gen_new_label();
1630 gen_load_gpr(t1
, rs
);
1631 gen_load_gpr(t2
, rt
);
1632 tcg_gen_add_tl(t0
, t1
, t2
);
1633 tcg_gen_xor_tl(t1
, t1
, t2
);
1634 tcg_gen_xor_tl(t2
, t0
, t2
);
1635 tcg_gen_andc_tl(t1
, t2
, t1
);
1637 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
1639 /* operands of same sign, result different sign */
1640 generate_exception(ctx
, EXCP_OVERFLOW
);
1642 gen_store_gpr(t0
, rd
);
1648 if (rs
!= 0 && rt
!= 0) {
1649 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1650 } else if (rs
== 0 && rt
!= 0) {
1651 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
1652 } else if (rs
!= 0 && rt
== 0) {
1653 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1655 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1661 TCGv t0
= tcg_temp_local_new();
1662 TCGv t1
= tcg_temp_new();
1663 TCGv t2
= tcg_temp_new();
1664 int l1
= gen_new_label();
1666 gen_load_gpr(t1
, rs
);
1667 gen_load_gpr(t2
, rt
);
1668 tcg_gen_sub_tl(t0
, t1
, t2
);
1669 tcg_gen_xor_tl(t2
, t1
, t2
);
1670 tcg_gen_xor_tl(t1
, t0
, t1
);
1671 tcg_gen_and_tl(t1
, t1
, t2
);
1673 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
1675 /* operands of different sign, first operand and result different sign */
1676 generate_exception(ctx
, EXCP_OVERFLOW
);
1678 gen_store_gpr(t0
, rd
);
1684 if (rs
!= 0 && rt
!= 0) {
1685 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1686 } else if (rs
== 0 && rt
!= 0) {
1687 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
1688 } else if (rs
!= 0 && rt
== 0) {
1689 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1691 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1697 if (likely(rs
!= 0 && rt
!= 0)) {
1698 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1699 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
1701 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1706 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
1709 /* Conditional move */
1710 static void gen_cond_move (CPUState
*env
, uint32_t opc
, int rd
, int rs
, int rt
)
1712 const char *opn
= "cond move";
1716 /* If no destination, treat it as a NOP.
1717 For add & sub, we must generate the overflow exception when needed. */
1722 l1
= gen_new_label();
1725 if (likely(rt
!= 0))
1726 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[rt
], 0, l1
);
1732 if (likely(rt
!= 0))
1733 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[rt
], 0, l1
);
1738 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1740 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1743 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
1747 static void gen_logic (CPUState
*env
, uint32_t opc
, int rd
, int rs
, int rt
)
1749 const char *opn
= "logic";
1752 /* If no destination, treat it as a NOP. */
1759 if (likely(rs
!= 0 && rt
!= 0)) {
1760 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1762 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1767 if (rs
!= 0 && rt
!= 0) {
1768 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1769 } else if (rs
== 0 && rt
!= 0) {
1770 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
1771 } else if (rs
!= 0 && rt
== 0) {
1772 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1774 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
1779 if (likely(rs
!= 0 && rt
!= 0)) {
1780 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1781 } else if (rs
== 0 && rt
!= 0) {
1782 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
1783 } else if (rs
!= 0 && rt
== 0) {
1784 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1786 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1791 if (likely(rs
!= 0 && rt
!= 0)) {
1792 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
1793 } else if (rs
== 0 && rt
!= 0) {
1794 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
1795 } else if (rs
!= 0 && rt
== 0) {
1796 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
1798 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
1803 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
1806 /* Set on lower than */
1807 static void gen_slt (CPUState
*env
, uint32_t opc
, int rd
, int rs
, int rt
)
1809 const char *opn
= "slt";
1813 /* If no destination, treat it as a NOP. */
1818 t0
= tcg_temp_new();
1819 t1
= tcg_temp_new();
1820 gen_load_gpr(t0
, rs
);
1821 gen_load_gpr(t1
, rt
);
1824 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
1828 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
1832 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
1838 static void gen_shift (CPUState
*env
, DisasContext
*ctx
, uint32_t opc
,
1839 int rd
, int rs
, int rt
)
1841 const char *opn
= "shifts";
1845 /* If no destination, treat it as a NOP.
1846 For add & sub, we must generate the overflow exception when needed. */
1851 t0
= tcg_temp_new();
1852 t1
= tcg_temp_new();
1853 gen_load_gpr(t0
, rs
);
1854 gen_load_gpr(t1
, rt
);
1857 tcg_gen_andi_tl(t0
, t0
, 0x1f);
1858 tcg_gen_shl_tl(t0
, t1
, t0
);
1859 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
1863 tcg_gen_andi_tl(t0
, t0
, 0x1f);
1864 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
1868 tcg_gen_ext32u_tl(t1
, t1
);
1869 tcg_gen_andi_tl(t0
, t0
, 0x1f);
1870 tcg_gen_shr_tl(t0
, t1
, t0
);
1871 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
1876 TCGv_i32 t2
= tcg_temp_new_i32();
1877 TCGv_i32 t3
= tcg_temp_new_i32();
1879 tcg_gen_trunc_tl_i32(t2
, t0
);
1880 tcg_gen_trunc_tl_i32(t3
, t1
);
1881 tcg_gen_andi_i32(t2
, t2
, 0x1f);
1882 tcg_gen_rotr_i32(t2
, t3
, t2
);
1883 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
1884 tcg_temp_free_i32(t2
);
1885 tcg_temp_free_i32(t3
);
1889 #if defined(TARGET_MIPS64)
1891 tcg_gen_andi_tl(t0
, t0
, 0x3f);
1892 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
1896 tcg_gen_andi_tl(t0
, t0
, 0x3f);
1897 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
1901 tcg_gen_andi_tl(t0
, t0
, 0x3f);
1902 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
1906 tcg_gen_andi_tl(t0
, t0
, 0x3f);
1907 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
1912 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
1917 /* Arithmetic on HI/LO registers */
1918 static void gen_HILO (DisasContext
*ctx
, uint32_t opc
, int reg
)
1920 const char *opn
= "hilo";
1922 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
1929 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[0]);
1933 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[0]);
1938 tcg_gen_mov_tl(cpu_HI
[0], cpu_gpr
[reg
]);
1940 tcg_gen_movi_tl(cpu_HI
[0], 0);
1945 tcg_gen_mov_tl(cpu_LO
[0], cpu_gpr
[reg
]);
1947 tcg_gen_movi_tl(cpu_LO
[0], 0);
1951 MIPS_DEBUG("%s %s", opn
, regnames
[reg
]);
1954 static void gen_muldiv (DisasContext
*ctx
, uint32_t opc
,
1957 const char *opn
= "mul/div";
1963 #if defined(TARGET_MIPS64)
1967 t0
= tcg_temp_local_new();
1968 t1
= tcg_temp_local_new();
1971 t0
= tcg_temp_new();
1972 t1
= tcg_temp_new();
1976 gen_load_gpr(t0
, rs
);
1977 gen_load_gpr(t1
, rt
);
1981 int l1
= gen_new_label();
1982 int l2
= gen_new_label();
1984 tcg_gen_ext32s_tl(t0
, t0
);
1985 tcg_gen_ext32s_tl(t1
, t1
);
1986 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
1987 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
1988 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
1990 tcg_gen_mov_tl(cpu_LO
[0], t0
);
1991 tcg_gen_movi_tl(cpu_HI
[0], 0);
1994 tcg_gen_div_tl(cpu_LO
[0], t0
, t1
);
1995 tcg_gen_rem_tl(cpu_HI
[0], t0
, t1
);
1996 tcg_gen_ext32s_tl(cpu_LO
[0], cpu_LO
[0]);
1997 tcg_gen_ext32s_tl(cpu_HI
[0], cpu_HI
[0]);
2004 int l1
= gen_new_label();
2006 tcg_gen_ext32u_tl(t0
, t0
);
2007 tcg_gen_ext32u_tl(t1
, t1
);
2008 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
2009 tcg_gen_divu_tl(cpu_LO
[0], t0
, t1
);
2010 tcg_gen_remu_tl(cpu_HI
[0], t0
, t1
);
2011 tcg_gen_ext32s_tl(cpu_LO
[0], cpu_LO
[0]);
2012 tcg_gen_ext32s_tl(cpu_HI
[0], cpu_HI
[0]);
2019 TCGv_i64 t2
= tcg_temp_new_i64();
2020 TCGv_i64 t3
= tcg_temp_new_i64();
2022 tcg_gen_ext_tl_i64(t2
, t0
);
2023 tcg_gen_ext_tl_i64(t3
, t1
);
2024 tcg_gen_mul_i64(t2
, t2
, t3
);
2025 tcg_temp_free_i64(t3
);
2026 tcg_gen_trunc_i64_tl(t0
, t2
);
2027 tcg_gen_shri_i64(t2
, t2
, 32);
2028 tcg_gen_trunc_i64_tl(t1
, t2
);
2029 tcg_temp_free_i64(t2
);
2030 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2031 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2037 TCGv_i64 t2
= tcg_temp_new_i64();
2038 TCGv_i64 t3
= tcg_temp_new_i64();
2040 tcg_gen_ext32u_tl(t0
, t0
);
2041 tcg_gen_ext32u_tl(t1
, t1
);
2042 tcg_gen_extu_tl_i64(t2
, t0
);
2043 tcg_gen_extu_tl_i64(t3
, t1
);
2044 tcg_gen_mul_i64(t2
, t2
, t3
);
2045 tcg_temp_free_i64(t3
);
2046 tcg_gen_trunc_i64_tl(t0
, t2
);
2047 tcg_gen_shri_i64(t2
, t2
, 32);
2048 tcg_gen_trunc_i64_tl(t1
, t2
);
2049 tcg_temp_free_i64(t2
);
2050 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2051 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2055 #if defined(TARGET_MIPS64)
2058 int l1
= gen_new_label();
2059 int l2
= gen_new_label();
2061 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
2062 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
2063 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
2064 tcg_gen_mov_tl(cpu_LO
[0], t0
);
2065 tcg_gen_movi_tl(cpu_HI
[0], 0);
2068 tcg_gen_div_i64(cpu_LO
[0], t0
, t1
);
2069 tcg_gen_rem_i64(cpu_HI
[0], t0
, t1
);
2076 int l1
= gen_new_label();
2078 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
2079 tcg_gen_divu_i64(cpu_LO
[0], t0
, t1
);
2080 tcg_gen_remu_i64(cpu_HI
[0], t0
, t1
);
2086 gen_helper_dmult(t0
, t1
);
2090 gen_helper_dmultu(t0
, t1
);
2096 TCGv_i64 t2
= tcg_temp_new_i64();
2097 TCGv_i64 t3
= tcg_temp_new_i64();
2099 tcg_gen_ext_tl_i64(t2
, t0
);
2100 tcg_gen_ext_tl_i64(t3
, t1
);
2101 tcg_gen_mul_i64(t2
, t2
, t3
);
2102 tcg_gen_concat_tl_i64(t3
, cpu_LO
[0], cpu_HI
[0]);
2103 tcg_gen_add_i64(t2
, t2
, t3
);
2104 tcg_temp_free_i64(t3
);
2105 tcg_gen_trunc_i64_tl(t0
, t2
);
2106 tcg_gen_shri_i64(t2
, t2
, 32);
2107 tcg_gen_trunc_i64_tl(t1
, t2
);
2108 tcg_temp_free_i64(t2
);
2109 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2110 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2116 TCGv_i64 t2
= tcg_temp_new_i64();
2117 TCGv_i64 t3
= tcg_temp_new_i64();
2119 tcg_gen_ext32u_tl(t0
, t0
);
2120 tcg_gen_ext32u_tl(t1
, t1
);
2121 tcg_gen_extu_tl_i64(t2
, t0
);
2122 tcg_gen_extu_tl_i64(t3
, t1
);
2123 tcg_gen_mul_i64(t2
, t2
, t3
);
2124 tcg_gen_concat_tl_i64(t3
, cpu_LO
[0], cpu_HI
[0]);
2125 tcg_gen_add_i64(t2
, t2
, t3
);
2126 tcg_temp_free_i64(t3
);
2127 tcg_gen_trunc_i64_tl(t0
, t2
);
2128 tcg_gen_shri_i64(t2
, t2
, 32);
2129 tcg_gen_trunc_i64_tl(t1
, t2
);
2130 tcg_temp_free_i64(t2
);
2131 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2132 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2138 TCGv_i64 t2
= tcg_temp_new_i64();
2139 TCGv_i64 t3
= tcg_temp_new_i64();
2141 tcg_gen_ext_tl_i64(t2
, t0
);
2142 tcg_gen_ext_tl_i64(t3
, t1
);
2143 tcg_gen_mul_i64(t2
, t2
, t3
);
2144 tcg_gen_concat_tl_i64(t3
, cpu_LO
[0], cpu_HI
[0]);
2145 tcg_gen_sub_i64(t2
, t3
, t2
);
2146 tcg_temp_free_i64(t3
);
2147 tcg_gen_trunc_i64_tl(t0
, t2
);
2148 tcg_gen_shri_i64(t2
, t2
, 32);
2149 tcg_gen_trunc_i64_tl(t1
, t2
);
2150 tcg_temp_free_i64(t2
);
2151 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2152 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2158 TCGv_i64 t2
= tcg_temp_new_i64();
2159 TCGv_i64 t3
= tcg_temp_new_i64();
2161 tcg_gen_ext32u_tl(t0
, t0
);
2162 tcg_gen_ext32u_tl(t1
, t1
);
2163 tcg_gen_extu_tl_i64(t2
, t0
);
2164 tcg_gen_extu_tl_i64(t3
, t1
);
2165 tcg_gen_mul_i64(t2
, t2
, t3
);
2166 tcg_gen_concat_tl_i64(t3
, cpu_LO
[0], cpu_HI
[0]);
2167 tcg_gen_sub_i64(t2
, t3
, t2
);
2168 tcg_temp_free_i64(t3
);
2169 tcg_gen_trunc_i64_tl(t0
, t2
);
2170 tcg_gen_shri_i64(t2
, t2
, 32);
2171 tcg_gen_trunc_i64_tl(t1
, t2
);
2172 tcg_temp_free_i64(t2
);
2173 tcg_gen_ext32s_tl(cpu_LO
[0], t0
);
2174 tcg_gen_ext32s_tl(cpu_HI
[0], t1
);
2180 generate_exception(ctx
, EXCP_RI
);
    MIPS_DEBUG("%s %s %s", opn, regnames[rs], regnames[rt]);
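
/*
 * Note (added for clarity): the DIV/DDIV paths above guard the two cases a
 * host division instruction cannot be trusted with: division by zero (the
 * operation is simply skipped, leaving HI/LO unchanged) and the
 * INT_MIN / -1 case, for which QEMU stores quotient INT_MIN and remainder 0
 * rather than risking a host fault.  In plain C the guarded signed 32-bit
 * case looks like this (illustrative only):
 */
static inline void example_div32(int32_t num, int32_t den,
                                 int32_t *lo, int32_t *hi)
{
    if (den == 0) {
        return;                      /* result left unchanged */
    }
    if (num == INT_MIN && den == -1) {
        *lo = num;                   /* quotient wraps to INT_MIN */
        *hi = 0;                     /* remainder is 0 */
        return;
    }
    *lo = num / den;                 /* quotient goes to LO */
    *hi = num % den;                 /* remainder goes to HI */
}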
2189 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
2190 int rd
, int rs
, int rt
)
2192 const char *opn
= "mul vr54xx";
2193 TCGv t0
= tcg_temp_new();
2194 TCGv t1
= tcg_temp_new();
2196 gen_load_gpr(t0
, rs
);
2197 gen_load_gpr(t1
, rt
);
2200 case OPC_VR54XX_MULS
:
2201 gen_helper_muls(t0
, t0
, t1
);
2204 case OPC_VR54XX_MULSU
:
2205 gen_helper_mulsu(t0
, t0
, t1
);
2208 case OPC_VR54XX_MACC
:
2209 gen_helper_macc(t0
, t0
, t1
);
2212 case OPC_VR54XX_MACCU
:
2213 gen_helper_maccu(t0
, t0
, t1
);
2216 case OPC_VR54XX_MSAC
:
2217 gen_helper_msac(t0
, t0
, t1
);
2220 case OPC_VR54XX_MSACU
:
2221 gen_helper_msacu(t0
, t0
, t1
);
2224 case OPC_VR54XX_MULHI
:
2225 gen_helper_mulhi(t0
, t0
, t1
);
2228 case OPC_VR54XX_MULHIU
:
2229 gen_helper_mulhiu(t0
, t0
, t1
);
2232 case OPC_VR54XX_MULSHI
:
2233 gen_helper_mulshi(t0
, t0
, t1
);
2236 case OPC_VR54XX_MULSHIU
:
2237 gen_helper_mulshiu(t0
, t0
, t1
);
2240 case OPC_VR54XX_MACCHI
:
2241 gen_helper_macchi(t0
, t0
, t1
);
2244 case OPC_VR54XX_MACCHIU
:
2245 gen_helper_macchiu(t0
, t0
, t1
);
2248 case OPC_VR54XX_MSACHI
:
2249 gen_helper_msachi(t0
, t0
, t1
);
2252 case OPC_VR54XX_MSACHIU
:
2253 gen_helper_msachiu(t0
, t0
, t1
);
2257 MIPS_INVAL("mul vr54xx");
2258 generate_exception(ctx
, EXCP_RI
);
2261 gen_store_gpr(t0
, rd
);
2262 MIPS_DEBUG("%s %s, %s, %s", opn
, regnames
[rd
], regnames
[rs
], regnames
[rt
]);
2269 static void gen_cl (DisasContext
*ctx
, uint32_t opc
,
2272 const char *opn
= "CLx";
2280 t0
= tcg_temp_new();
2281 gen_load_gpr(t0
, rs
);
2284 gen_helper_clo(cpu_gpr
[rd
], t0
);
2288 gen_helper_clz(cpu_gpr
[rd
], t0
);
2291 #if defined(TARGET_MIPS64)
2293 gen_helper_dclo(cpu_gpr
[rd
], t0
);
2297 gen_helper_dclz(cpu_gpr
[rd
], t0
);
2302 MIPS_DEBUG("%s %s, %s", opn
, regnames
[rd
], regnames
[rs
]);
2307 static void gen_trap (DisasContext
*ctx
, uint32_t opc
,
2308 int rs
, int rt
, int16_t imm
)
2311 TCGv t0
= tcg_temp_new();
2312 TCGv t1
= tcg_temp_new();
2315 /* Load needed operands */
2323 /* Compare two registers */
2325 gen_load_gpr(t0
, rs
);
2326 gen_load_gpr(t1
, rt
);
2336 /* Compare register to immediate */
2337 if (rs
!= 0 || imm
!= 0) {
2338 gen_load_gpr(t0
, rs
);
2339 tcg_gen_movi_tl(t1
, (int32_t)imm
);
2346 case OPC_TEQ
: /* rs == rs */
2347 case OPC_TEQI
: /* r0 == 0 */
2348 case OPC_TGE
: /* rs >= rs */
2349 case OPC_TGEI
: /* r0 >= 0 */
2350 case OPC_TGEU
: /* rs >= rs unsigned */
2351 case OPC_TGEIU
: /* r0 >= 0 unsigned */
2353 generate_exception(ctx
, EXCP_TRAP
);
2355 case OPC_TLT
: /* rs < rs */
2356 case OPC_TLTI
: /* r0 < 0 */
2357 case OPC_TLTU
: /* rs < rs unsigned */
2358 case OPC_TLTIU
: /* r0 < 0 unsigned */
2359 case OPC_TNE
: /* rs != rs */
2360 case OPC_TNEI
: /* r0 != 0 */
2361 /* Never trap: treat as NOP. */
2365 int l1
= gen_new_label();
2370 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
2374 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
2378 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
2382 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
2386 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
2390 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
2393 generate_exception(ctx
, EXCP_TRAP
);
2400 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
2402 TranslationBlock
*tb
;
2404 if ((tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
) &&
2405 likely(!ctx
->singlestep_enabled
)) {
2408 tcg_gen_exit_tb((long)tb
+ n
);
2411 if (ctx
->singlestep_enabled
) {
2412 save_cpu_state(ctx
, 0);
2413 gen_helper_0i(raise_exception
, EXCP_DEBUG
);
/* Branches (before delay slot) */
static void gen_compute_branch (DisasContext *ctx, uint32_t opc,
                                int insn_bytes,
                                int rs, int rt, int32_t offset)
{
    target_ulong btgt = -1;
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay slot at PC 0x" TARGET_FMT_lx "\n", ctx->pc);
#endif
        generate_exception(ctx, EXCP_RI);
    }

    /* Load needed operands */
    /* Compare two registers */
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    btgt = ctx->pc + insn_bytes + offset;
    /* Compare to zero */
    gen_load_gpr(t0, rs);
    btgt = ctx->pc + insn_bytes + offset;
    /* Jump to immediate */
    btgt = ((ctx->pc + insn_bytes) & (int32_t)0xF0000000) | (uint32_t)offset;
    /* Jump to register */
    if (offset != 0 && offset != 16) {
        /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
           others are reserved. */
        MIPS_INVAL("jump hint");
        generate_exception(ctx, EXCP_RI);
    }
    gen_load_gpr(btarget, rs);
    MIPS_INVAL("branch/jump");
    generate_exception(ctx, EXCP_RI);

    if (bcond_compute == 0) {
        /* No condition to be computed */
        case OPC_BEQ:     /* rx == rx        */
        case OPC_BEQL:    /* rx == rx likely */
        case OPC_BGEZ:    /* 0 >= 0          */
        case OPC_BGEZL:   /* 0 >= 0 likely   */
        case OPC_BLEZ:    /* 0 <= 0          */
        case OPC_BLEZL:   /* 0 <= 0 likely   */
            ctx->hflags |= MIPS_HFLAG_B;
            MIPS_DEBUG("balways");
        case OPC_BGEZAL:  /* 0 >= 0          */
        case OPC_BGEZALL: /* 0 >= 0 likely   */
            ctx->hflags |= (opc == OPC_BGEZALS
                            ? MIPS_HFLAG_BDS16
                            : MIPS_HFLAG_BDS32);
            /* Always take and link */
            ctx->hflags |= MIPS_HFLAG_B;
            MIPS_DEBUG("balways and link");
        case OPC_BNE:     /* rx != rx        */
        case OPC_BGTZ:    /* 0 > 0           */
        case OPC_BLTZ:    /* 0 < 0           */
            MIPS_DEBUG("bnever (NOP)");
        case OPC_BLTZAL:  /* 0 < 0           */
            ctx->hflags |= (opc == OPC_BLTZALS
                            ? MIPS_HFLAG_BDS16
                            : MIPS_HFLAG_BDS32);
            /* Handle as an unconditional branch to get correct delay
               slot checking.  */
            btgt = ctx->pc + (opc == OPC_BLTZALS ? 6 : 8);
            ctx->hflags |= MIPS_HFLAG_B;
            MIPS_DEBUG("bnever and link");
        case OPC_BLTZALL: /* 0 < 0 likely */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 8);
            /* Skip the instruction in the delay slot */
            MIPS_DEBUG("bnever, link and skip");
        case OPC_BNEL:    /* rx != rx likely */
        case OPC_BGTZL:   /* 0 > 0 likely    */
        case OPC_BLTZL:   /* 0 < 0 likely    */
            /* Skip the instruction in the delay slot */
            MIPS_DEBUG("bnever and skip");
            ctx->hflags |= MIPS_HFLAG_B;
            MIPS_DEBUG("j " TARGET_FMT_lx, btgt);
            ctx->hflags |= MIPS_HFLAG_BX;
            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= ((opc == OPC_JALS || opc == OPC_JALXS)
                            ? MIPS_HFLAG_BDS16
                            : MIPS_HFLAG_BDS32);
            MIPS_DEBUG("jal " TARGET_FMT_lx, btgt);
            ctx->hflags |= MIPS_HFLAG_BR;
            if (insn_bytes == 4)
                ctx->hflags |= MIPS_HFLAG_BDS32;
            MIPS_DEBUG("jr %s", regnames[rs]);
            ctx->hflags |= MIPS_HFLAG_BR;
            ctx->hflags |= (opc == OPC_JALRS
                            ? MIPS_HFLAG_BDS16
                            : MIPS_HFLAG_BDS32);
            MIPS_DEBUG("jalr %s, %s", regnames[rt], regnames[rs]);
            MIPS_INVAL("branch/jump");
            generate_exception(ctx, EXCP_RI);

        tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
        MIPS_DEBUG("beq %s, %s, " TARGET_FMT_lx,
                   regnames[rs], regnames[rt], btgt);
        tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
        MIPS_DEBUG("beql %s, %s, " TARGET_FMT_lx,
                   regnames[rs], regnames[rt], btgt);
        tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
        MIPS_DEBUG("bne %s, %s, " TARGET_FMT_lx,
                   regnames[rs], regnames[rt], btgt);
        tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
        MIPS_DEBUG("bnel %s, %s, " TARGET_FMT_lx,
                   regnames[rs], regnames[rt], btgt);
        tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
        MIPS_DEBUG("bgez %s, " TARGET_FMT_lx, regnames[rs], btgt);
        tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
        MIPS_DEBUG("bgezl %s, " TARGET_FMT_lx, regnames[rs], btgt);
        ctx->hflags |= (opc == OPC_BGEZALS
                        ? MIPS_HFLAG_BDS16
                        : MIPS_HFLAG_BDS32);
        tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
        MIPS_DEBUG("bgezal %s, " TARGET_FMT_lx, regnames[rs], btgt);
        tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
        MIPS_DEBUG("bgezall %s, " TARGET_FMT_lx, regnames[rs], btgt);
        tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
        MIPS_DEBUG("bgtz %s, " TARGET_FMT_lx, regnames[rs], btgt);
        tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
        MIPS_DEBUG("bgtzl %s, " TARGET_FMT_lx, regnames[rs], btgt);
        tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
        MIPS_DEBUG("blez %s, " TARGET_FMT_lx, regnames[rs], btgt);
        tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
        MIPS_DEBUG("blezl %s, " TARGET_FMT_lx, regnames[rs], btgt);
        tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
        MIPS_DEBUG("bltz %s, " TARGET_FMT_lx, regnames[rs], btgt);
        tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
        MIPS_DEBUG("bltzl %s, " TARGET_FMT_lx, regnames[rs], btgt);
        ctx->hflags |= (opc == OPC_BLTZALS
                        ? MIPS_HFLAG_BDS16
                        : MIPS_HFLAG_BDS32);
        tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
        MIPS_DEBUG("bltzal %s, " TARGET_FMT_lx, regnames[rs], btgt);
        ctx->hflags |= MIPS_HFLAG_BC;
        tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
        MIPS_DEBUG("bltzall %s, " TARGET_FMT_lx, regnames[rs], btgt);
        ctx->hflags |= MIPS_HFLAG_BL;
        MIPS_INVAL("conditional branch/jump");
        generate_exception(ctx, EXCP_RI);
    }
    MIPS_DEBUG("enter ds: link %d cond %02x target " TARGET_FMT_lx,
               blink, ctx->hflags, btgt);
    ctx->btarget = btgt;

    int post_delay = insn_bytes;
    int lowbit = !!(ctx->hflags & MIPS_HFLAG_M16);

    if (opc != OPC_JALRC)
        post_delay += ((ctx->hflags & MIPS_HFLAG_BDS16) ? 2 : 4);
    tcg_gen_movi_tl(cpu_gpr[blink], ctx->pc + post_delay + lowbit);

    if (insn_bytes == 2)
        ctx->hflags |= MIPS_HFLAG_B16;
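/* Branch handling is split in two: this function only computes the target
   (btgt), evaluates any condition into bcond, and records the branch kind
   in ctx->hflags (MIPS_HFLAG_B for unconditional, MIPS_HFLAG_BR for
   register jumps, MIPS_HFLAG_BC / MIPS_HFLAG_BL for conditional and
   branch-likely forms) together with the delay slot size (BDS16/BDS32).
   The delay slot instruction itself is translated afterwards; linking
   writes ctx->pc + post_delay + lowbit so that microMIPS/MIPS16 return
   addresses keep the ISA mode bit set. */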
/* special3 bitfield operations */
static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
                        int rs, int lsb, int msb)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t1, rs);
    tcg_gen_shri_tl(t0, t1, lsb);
    tcg_gen_andi_tl(t0, t0, (1 << (msb + 1)) - 1);
    tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
    tcg_gen_shri_tl(t0, t1, lsb);
    tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1 + 32)) - 1);
    tcg_gen_shri_tl(t0, t1, lsb + 32);
    tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
    tcg_gen_shri_tl(t0, t1, lsb);
    tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
#endif
    mask = ((msb - lsb + 1 < 32) ? ((1 << (msb - lsb + 1)) - 1) : ~0) << lsb;
    gen_load_gpr(t0, rt);
    tcg_gen_andi_tl(t0, t0, ~mask);
    tcg_gen_shli_tl(t1, t1, lsb);
    tcg_gen_andi_tl(t1, t1, mask);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
    mask = ((msb - lsb + 1 + 32 < 64) ? ((1ULL << (msb - lsb + 1 + 32)) - 1) : ~0ULL) << lsb;
    gen_load_gpr(t0, rt);
    tcg_gen_andi_tl(t0, t0, ~mask);
    tcg_gen_shli_tl(t1, t1, lsb);
    tcg_gen_andi_tl(t1, t1, mask);
    tcg_gen_or_tl(t0, t0, t1);
    mask = ((1ULL << (msb - lsb + 1)) - 1) << (lsb + 32);
    gen_load_gpr(t0, rt);
    tcg_gen_andi_tl(t0, t0, ~mask);
    tcg_gen_shli_tl(t1, t1, lsb + 32);
    tcg_gen_andi_tl(t1, t1, mask);
    tcg_gen_or_tl(t0, t0, t1);
    gen_load_gpr(t0, rt);
    mask = ((1ULL << (msb - lsb + 1)) - 1) << lsb;
    gen_load_gpr(t0, rt);
    tcg_gen_andi_tl(t0, t0, ~mask);
    tcg_gen_shli_tl(t1, t1, lsb);
    tcg_gen_andi_tl(t1, t1, mask);
    tcg_gen_or_tl(t0, t0, t1);
#endif
    MIPS_INVAL("bitops");
    generate_exception(ctx, EXCP_RI);
    gen_store_gpr(t0, rt);
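/* The INS-type cases build a mask of (msb - lsb + 1) one bits shifted up
   by lsb, clear that field in rt, and OR in the correspondingly shifted
   rs bits; e.g. with lsb = 4 and msb = 11 the mask is
   ((1 << 8) - 1) << 4 = 0x0FF0, so bits 11..4 of the destination are
   replaced.  The EXT-type cases are the inverse: shift right by lsb, keep
   the low (msb + 1) bits and sign-extend the 32-bit result. */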
static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
{
    /* If no destination, treat it as a NOP. */
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);

    TCGv t1 = tcg_temp_new();

    tcg_gen_shri_tl(t1, t0, 8);
    tcg_gen_andi_tl(t1, t1, 0x00FF00FF);
    tcg_gen_shli_tl(t0, t0, 8);
    tcg_gen_andi_tl(t0, t0, ~0x00FF00FF);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_ext32s_tl(cpu_gpr[rd], t0);

    tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
    tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
    TCGv t1 = tcg_temp_new();

    tcg_gen_shri_tl(t1, t0, 8);
    tcg_gen_andi_tl(t1, t1, 0x00FF00FF00FF00FFULL);
    tcg_gen_shli_tl(t0, t0, 8);
    tcg_gen_andi_tl(t0, t0, ~0x00FF00FF00FF00FFULL);
    tcg_gen_or_tl(cpu_gpr[rd], t0, t1);

    TCGv t1 = tcg_temp_new();

    tcg_gen_shri_tl(t1, t0, 16);
    tcg_gen_andi_tl(t1, t1, 0x0000FFFF0000FFFFULL);
    tcg_gen_shli_tl(t0, t0, 16);
    tcg_gen_andi_tl(t0, t0, ~0x0000FFFF0000FFFFULL);
    tcg_gen_or_tl(t0, t0, t1);
    tcg_gen_shri_tl(t1, t0, 32);
    tcg_gen_shli_tl(t0, t0, 32);
    tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
#endif
    MIPS_INVAL("bsfhl");
    generate_exception(ctx, EXCP_RI);
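/* The byte-swap-within-halfword cases (WSBH and its 64-bit counterpart)
   work purely with shifts and masks: the odd bytes are shifted down and
   masked with 0x00FF00FF..., the even bytes are shifted up and masked with
   the complement, and the two halves are ORed back together.  The last
   64-bit case additionally swaps the halfwords of each word and then the
   two 32-bit words, which reverses the halfword order of the register. */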
#ifndef CONFIG_USER_ONLY
/* CP0 (MMU and control) */
static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);
}

static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
{
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);
}

static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);
}

static inline void gen_mtc0_store64 (TCGv arg, target_ulong off)
{
    tcg_gen_ext32s_tl(arg, arg);
    tcg_gen_st_tl(arg, cpu_env, off);
}
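/* CP0 registers narrower than the target register width are kept as 32-bit
   fields in CPUState, so the mfc0 load helpers read them as i32 and
   sign-extend to the target width, while the mtc0 store helpers truncate
   (or explicitly sign-extend for the 64-bit-backed registers) before
   writing.  This keeps MFC0 results canonically sign-extended on MIPS64. */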
static void gen_mfc0 (CPUState *env, DisasContext *ctx, TCGv arg, int reg, int sel)
{
    const char *rn = "invalid";

    check_insn(env, ctx, ISA_MIPS32);

    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Index));
    check_insn(env, ctx, ASE_MT);
    gen_helper_mfc0_mvpcontrol(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mfc0_mvpconf0(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mfc0_mvpconf1(arg);
    gen_helper_mfc0_random(arg);
    check_insn(env, ctx, ASE_MT);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEControl));
    check_insn(env, ctx, ASE_MT);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEConf0));
    check_insn(env, ctx, ASE_MT);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEConf1));
    check_insn(env, ctx, ASE_MT);
    gen_mfc0_load64(arg, offsetof(CPUState, CP0_YQMask));
    check_insn(env, ctx, ASE_MT);
    gen_mfc0_load64(arg, offsetof(CPUState, CP0_VPESchedule));
    check_insn(env, ctx, ASE_MT);
    gen_mfc0_load64(arg, offsetof(CPUState, CP0_VPEScheFBack));
    rn = "VPEScheFBack";
    check_insn(env, ctx, ASE_MT);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEOpt));
    tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryLo0));
    tcg_gen_ext32s_tl(arg, arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mfc0_tcstatus(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mfc0_tcbind(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mfc0_tcrestart(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mfc0_tchalt(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mfc0_tccontext(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mfc0_tcschedule(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mfc0_tcschefback(arg);
    tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryLo1));
    tcg_gen_ext32s_tl(arg, arg);
    tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_Context));
    tcg_gen_ext32s_tl(arg, arg);
//  gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
    rn = "ContextConfig";
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_PageMask));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_PageGrain));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Wired));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf0));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf1));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf2));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf3));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf4));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_HWREna));
    tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_BadVAddr));
    tcg_gen_ext32s_tl(arg, arg);
    /* Mark as an IO operation because we read the time. */
    gen_helper_mfc0_count(arg);
    ctx->bstate = BS_STOP;
    /* 6,7 are implementation dependent */
    tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryHi));
    tcg_gen_ext32s_tl(arg, arg);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Compare));
    /* 6,7 are implementation dependent */
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Status));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_IntCtl));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSCtl));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSMap));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Cause));
    tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EPC));
    tcg_gen_ext32s_tl(arg, arg);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_PRid));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_EBase));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config0));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config1));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config2));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config3));
    /* 4,5 are reserved */
    /* 6,7 are implementation dependent */
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config6));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config7));
    gen_helper_mfc0_lladdr(arg);
    gen_helper_1i(mfc0_watchlo, arg, sel);
    gen_helper_1i(mfc0_watchhi, arg, sel);
#if defined(TARGET_MIPS64)
    check_insn(env, ctx, ISA_MIPS3);
    tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_XContext));
    tcg_gen_ext32s_tl(arg, arg);
#endif
    /* Officially reserved, but sel 0 is used for R1x000 framemask */
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Framemask));
    tcg_gen_movi_tl(arg, 0); /* unimplemented */
    rn = "'Diagnostic"; /* implementation dependent */
    gen_helper_mfc0_debug(arg); /* EJTAG support */
//  gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
    rn = "TraceControl";
//  gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
    rn = "TraceControl2";
//  gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
    rn = "UserTraceData";
//  gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
    tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_DEPC));
    tcg_gen_ext32s_tl(arg, arg);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Performance0));
    rn = "Performance0";
//  gen_helper_mfc0_performance1(arg);
    rn = "Performance1";
//  gen_helper_mfc0_performance2(arg);
    rn = "Performance2";
//  gen_helper_mfc0_performance3(arg);
    rn = "Performance3";
//  gen_helper_mfc0_performance4(arg);
    rn = "Performance4";
//  gen_helper_mfc0_performance5(arg);
    rn = "Performance5";
//  gen_helper_mfc0_performance6(arg);
    rn = "Performance6";
//  gen_helper_mfc0_performance7(arg);
    rn = "Performance7";
    tcg_gen_movi_tl(arg, 0); /* unimplemented */
    tcg_gen_movi_tl(arg, 0); /* unimplemented */
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_TagLo));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_DataLo));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_TagHi));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_DataHi));
    tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_ErrorEPC));
    tcg_gen_ext32s_tl(arg, arg);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_DESAVE));
    LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn, reg, sel);

    LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn, reg, sel);
    generate_exception(ctx, EXCP_RI);
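/* gen_mfc0 dispatches on (reg, sel): simple registers are read straight out
   of CPUState with gen_mfc0_load32/load64 or tcg_gen_ld_tl plus ext32s,
   while registers with side effects or non-trivial packing (Random, Count,
   the MT TC* registers, LLAddr, WatchLo/Hi, Debug) go through dedicated
   helpers.  Reading Count stops translation (BS_STOP) because its value
   depends on the I/O clock, and unknown register/selector combinations
   raise a Reserved Instruction exception. */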
static void gen_mtc0 (CPUState *env, DisasContext *ctx, TCGv arg, int reg, int sel)
{
    const char *rn = "invalid";

    check_insn(env, ctx, ISA_MIPS32);

    gen_helper_mtc0_index(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_mvpcontrol(arg);
    check_insn(env, ctx, ASE_MT);
    check_insn(env, ctx, ASE_MT);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_vpecontrol(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_vpeconf0(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_vpeconf1(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_yqmask(arg);
    check_insn(env, ctx, ASE_MT);
    gen_mtc0_store64(arg, offsetof(CPUState, CP0_VPESchedule));
    check_insn(env, ctx, ASE_MT);
    gen_mtc0_store64(arg, offsetof(CPUState, CP0_VPEScheFBack));
    rn = "VPEScheFBack";
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_vpeopt(arg);
    gen_helper_mtc0_entrylo0(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_tcstatus(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_tcbind(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_tcrestart(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_tchalt(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_tccontext(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_tcschedule(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_tcschefback(arg);
    gen_helper_mtc0_entrylo1(arg);
    gen_helper_mtc0_context(arg);
//  gen_helper_mtc0_contextconfig(arg); /* SmartMIPS ASE */
    rn = "ContextConfig";
    gen_helper_mtc0_pagemask(arg);
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_helper_mtc0_pagegrain(arg);
    gen_helper_mtc0_wired(arg);
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_helper_mtc0_srsconf0(arg);
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_helper_mtc0_srsconf1(arg);
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_helper_mtc0_srsconf2(arg);
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_helper_mtc0_srsconf3(arg);
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_helper_mtc0_srsconf4(arg);
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_helper_mtc0_hwrena(arg);
    gen_helper_mtc0_count(arg);
    /* 6,7 are implementation dependent */
    gen_helper_mtc0_entryhi(arg);
    gen_helper_mtc0_compare(arg);
    /* 6,7 are implementation dependent */
    save_cpu_state(ctx, 1);
    gen_helper_mtc0_status(arg);
    /* BS_STOP isn't good enough here, hflags may have changed. */
    gen_save_pc(ctx->pc + 4);
    ctx->bstate = BS_EXCP;
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_helper_mtc0_intctl(arg);
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_helper_mtc0_srsctl(arg);
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mtc0_store32(arg, offsetof(CPUState, CP0_SRSMap));
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
    save_cpu_state(ctx, 1);
    gen_helper_mtc0_cause(arg);
    gen_mtc0_store64(arg, offsetof(CPUState, CP0_EPC));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_helper_mtc0_ebase(arg);
    gen_helper_mtc0_config0(arg);
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
    /* ignored, read only */
    gen_helper_mtc0_config2(arg);
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
    /* ignored, read only */
    /* 4,5 are reserved */
    /* 6,7 are implementation dependent */
    rn = "Invalid config selector";
    gen_helper_mtc0_lladdr(arg);
    gen_helper_1i(mtc0_watchlo, arg, sel);
    gen_helper_1i(mtc0_watchhi, arg, sel);
#if defined(TARGET_MIPS64)
    check_insn(env, ctx, ISA_MIPS3);
    gen_helper_mtc0_xcontext(arg);
#endif
    /* Officially reserved, but sel 0 is used for R1x000 framemask */
    gen_helper_mtc0_framemask(arg);
    rn = "Diagnostic"; /* implementation dependent */
    gen_helper_mtc0_debug(arg); /* EJTAG support */
    /* BS_STOP isn't good enough here, hflags may have changed. */
    gen_save_pc(ctx->pc + 4);
    ctx->bstate = BS_EXCP;
//  gen_helper_mtc0_tracecontrol(arg); /* PDtrace support */
    rn = "TraceControl";
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
//  gen_helper_mtc0_tracecontrol2(arg); /* PDtrace support */
    rn = "TraceControl2";
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
//  gen_helper_mtc0_usertracedata(arg); /* PDtrace support */
    rn = "UserTraceData";
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
//  gen_helper_mtc0_tracebpc(arg); /* PDtrace support */
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
    gen_mtc0_store64(arg, offsetof(CPUState, CP0_DEPC));
    gen_helper_mtc0_performance0(arg);
    rn = "Performance0";
//  gen_helper_mtc0_performance1(arg);
    rn = "Performance1";
//  gen_helper_mtc0_performance2(arg);
    rn = "Performance2";
//  gen_helper_mtc0_performance3(arg);
    rn = "Performance3";
//  gen_helper_mtc0_performance4(arg);
    rn = "Performance4";
//  gen_helper_mtc0_performance5(arg);
    rn = "Performance5";
//  gen_helper_mtc0_performance6(arg);
    rn = "Performance6";
//  gen_helper_mtc0_performance7(arg);
    rn = "Performance7";
    gen_helper_mtc0_taglo(arg);
    gen_helper_mtc0_datalo(arg);
    gen_helper_mtc0_taghi(arg);
    gen_helper_mtc0_datahi(arg);
    gen_mtc0_store64(arg, offsetof(CPUState, CP0_ErrorEPC));
    gen_mtc0_store32(arg, offsetof(CPUState, CP0_DESAVE));
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
    LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn, reg, sel);
    /* For simplicity assume that all writes can cause interrupts.  */
    ctx->bstate = BS_STOP;

    LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn, reg, sel);
    generate_exception(ctx, EXCP_RI);
#if defined(TARGET_MIPS64)
static void gen_dmfc0 (CPUState *env, DisasContext *ctx, TCGv arg, int reg, int sel)
{
    const char *rn = "invalid";

    check_insn(env, ctx, ISA_MIPS64);

    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Index));
    check_insn(env, ctx, ASE_MT);
    gen_helper_mfc0_mvpcontrol(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mfc0_mvpconf0(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mfc0_mvpconf1(arg);
    gen_helper_mfc0_random(arg);
    check_insn(env, ctx, ASE_MT);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEControl));
    check_insn(env, ctx, ASE_MT);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEConf0));
    check_insn(env, ctx, ASE_MT);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEConf1));
    check_insn(env, ctx, ASE_MT);
    tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_YQMask));
    check_insn(env, ctx, ASE_MT);
    tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_VPESchedule));
    check_insn(env, ctx, ASE_MT);
    tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_VPEScheFBack));
    rn = "VPEScheFBack";
    check_insn(env, ctx, ASE_MT);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_VPEOpt));
    tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryLo0));
    check_insn(env, ctx, ASE_MT);
    gen_helper_mfc0_tcstatus(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mfc0_tcbind(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_dmfc0_tcrestart(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_dmfc0_tchalt(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_dmfc0_tccontext(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_dmfc0_tcschedule(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_dmfc0_tcschefback(arg);
    tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryLo1));
    tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_Context));
//  gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
    rn = "ContextConfig";
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_PageMask));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_PageGrain));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Wired));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf0));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf1));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf2));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf3));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSConf4));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_HWREna));
    tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_BadVAddr));
    /* Mark as an IO operation because we read the time. */
    gen_helper_mfc0_count(arg);
    ctx->bstate = BS_STOP;
    /* 6,7 are implementation dependent */
    tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EntryHi));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Compare));
    /* 6,7 are implementation dependent */
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Status));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_IntCtl));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSCtl));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_SRSMap));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Cause));
    tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_EPC));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_PRid));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_EBase));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config0));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config1));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config2));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config3));
    /* 6,7 are implementation dependent */
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config6));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Config7));
    gen_helper_dmfc0_lladdr(arg);
    gen_helper_1i(dmfc0_watchlo, arg, sel);
    gen_helper_1i(mfc0_watchhi, arg, sel);
    check_insn(env, ctx, ISA_MIPS3);
    tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_XContext));
    /* Officially reserved, but sel 0 is used for R1x000 framemask */
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Framemask));
    tcg_gen_movi_tl(arg, 0); /* unimplemented */
    rn = "'Diagnostic"; /* implementation dependent */
    gen_helper_mfc0_debug(arg); /* EJTAG support */
//  gen_helper_dmfc0_tracecontrol(arg); /* PDtrace support */
    rn = "TraceControl";
//  gen_helper_dmfc0_tracecontrol2(arg); /* PDtrace support */
    rn = "TraceControl2";
//  gen_helper_dmfc0_usertracedata(arg); /* PDtrace support */
    rn = "UserTraceData";
//  gen_helper_dmfc0_tracebpc(arg); /* PDtrace support */
    tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_DEPC));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_Performance0));
    rn = "Performance0";
//  gen_helper_dmfc0_performance1(arg);
    rn = "Performance1";
//  gen_helper_dmfc0_performance2(arg);
    rn = "Performance2";
//  gen_helper_dmfc0_performance3(arg);
    rn = "Performance3";
//  gen_helper_dmfc0_performance4(arg);
    rn = "Performance4";
//  gen_helper_dmfc0_performance5(arg);
    rn = "Performance5";
//  gen_helper_dmfc0_performance6(arg);
    rn = "Performance6";
//  gen_helper_dmfc0_performance7(arg);
    rn = "Performance7";
    tcg_gen_movi_tl(arg, 0); /* unimplemented */
    tcg_gen_movi_tl(arg, 0); /* unimplemented */
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_TagLo));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_DataLo));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_TagHi));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_DataHi));
    tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUState, CP0_ErrorEPC));
    gen_mfc0_load32(arg, offsetof(CPUState, CP0_DESAVE));
    LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn, reg, sel);

    LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn, reg, sel);
    generate_exception(ctx, EXCP_RI);
static void gen_dmtc0 (CPUState *env, DisasContext *ctx, TCGv arg, int reg, int sel)
{
    const char *rn = "invalid";

    check_insn(env, ctx, ISA_MIPS64);

    gen_helper_mtc0_index(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_mvpcontrol(arg);
    check_insn(env, ctx, ASE_MT);
    check_insn(env, ctx, ASE_MT);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_vpecontrol(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_vpeconf0(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_vpeconf1(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_yqmask(arg);
    check_insn(env, ctx, ASE_MT);
    tcg_gen_st_tl(arg, cpu_env, offsetof(CPUState, CP0_VPESchedule));
    check_insn(env, ctx, ASE_MT);
    tcg_gen_st_tl(arg, cpu_env, offsetof(CPUState, CP0_VPEScheFBack));
    rn = "VPEScheFBack";
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_vpeopt(arg);
    gen_helper_mtc0_entrylo0(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_tcstatus(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_tcbind(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_tcrestart(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_tchalt(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_tccontext(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_tcschedule(arg);
    check_insn(env, ctx, ASE_MT);
    gen_helper_mtc0_tcschefback(arg);
    gen_helper_mtc0_entrylo1(arg);
    gen_helper_mtc0_context(arg);
//  gen_helper_mtc0_contextconfig(arg); /* SmartMIPS ASE */
    rn = "ContextConfig";
    gen_helper_mtc0_pagemask(arg);
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_helper_mtc0_pagegrain(arg);
    gen_helper_mtc0_wired(arg);
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_helper_mtc0_srsconf0(arg);
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_helper_mtc0_srsconf1(arg);
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_helper_mtc0_srsconf2(arg);
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_helper_mtc0_srsconf3(arg);
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_helper_mtc0_srsconf4(arg);
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_helper_mtc0_hwrena(arg);
    gen_helper_mtc0_count(arg);
    /* 6,7 are implementation dependent */
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
    gen_helper_mtc0_entryhi(arg);
    gen_helper_mtc0_compare(arg);
    /* 6,7 are implementation dependent */
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
    save_cpu_state(ctx, 1);
    gen_helper_mtc0_status(arg);
    /* BS_STOP isn't good enough here, hflags may have changed. */
    gen_save_pc(ctx->pc + 4);
    ctx->bstate = BS_EXCP;
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_helper_mtc0_intctl(arg);
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_helper_mtc0_srsctl(arg);
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_mtc0_store32(arg, offsetof(CPUState, CP0_SRSMap));
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
    save_cpu_state(ctx, 1);
    gen_helper_mtc0_cause(arg);
    tcg_gen_st_tl(arg, cpu_env, offsetof(CPUState, CP0_EPC));
    check_insn(env, ctx, ISA_MIPS32R2);
    gen_helper_mtc0_ebase(arg);
    gen_helper_mtc0_config0(arg);
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
    /* ignored, read only */
    gen_helper_mtc0_config2(arg);
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
    /* 6,7 are implementation dependent */
    rn = "Invalid config selector";
    gen_helper_mtc0_lladdr(arg);
    gen_helper_1i(mtc0_watchlo, arg, sel);
    gen_helper_1i(mtc0_watchhi, arg, sel);
    check_insn(env, ctx, ISA_MIPS3);
    gen_helper_mtc0_xcontext(arg);
    /* Officially reserved, but sel 0 is used for R1x000 framemask */
    gen_helper_mtc0_framemask(arg);
    rn = "Diagnostic"; /* implementation dependent */
    gen_helper_mtc0_debug(arg); /* EJTAG support */
    /* BS_STOP isn't good enough here, hflags may have changed. */
    gen_save_pc(ctx->pc + 4);
    ctx->bstate = BS_EXCP;
//  gen_helper_mtc0_tracecontrol(arg); /* PDtrace support */
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
    rn = "TraceControl";
//  gen_helper_mtc0_tracecontrol2(arg); /* PDtrace support */
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
    rn = "TraceControl2";
//  gen_helper_mtc0_usertracedata(arg); /* PDtrace support */
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
    rn = "UserTraceData";
//  gen_helper_mtc0_tracebpc(arg); /* PDtrace support */
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
    tcg_gen_st_tl(arg, cpu_env, offsetof(CPUState, CP0_DEPC));
    gen_helper_mtc0_performance0(arg);
    rn = "Performance0";
//  gen_helper_mtc0_performance1(arg);
    rn = "Performance1";
//  gen_helper_mtc0_performance2(arg);
    rn = "Performance2";
//  gen_helper_mtc0_performance3(arg);
    rn = "Performance3";
//  gen_helper_mtc0_performance4(arg);
    rn = "Performance4";
//  gen_helper_mtc0_performance5(arg);
    rn = "Performance5";
//  gen_helper_mtc0_performance6(arg);
    rn = "Performance6";
//  gen_helper_mtc0_performance7(arg);
    rn = "Performance7";
    gen_helper_mtc0_taglo(arg);
    gen_helper_mtc0_datalo(arg);
    gen_helper_mtc0_taghi(arg);
    gen_helper_mtc0_datahi(arg);
    tcg_gen_st_tl(arg, cpu_env, offsetof(CPUState, CP0_ErrorEPC));
    gen_mtc0_store32(arg, offsetof(CPUState, CP0_DESAVE));
    /* Stop translation as we may have switched the execution mode */
    ctx->bstate = BS_STOP;
    LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn, reg, sel);
    /* For simplicity assume that all writes can cause interrupts.  */
    ctx->bstate = BS_STOP;

    LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn, reg, sel);
    generate_exception(ctx, EXCP_RI);
#endif /* TARGET_MIPS64 */
static void gen_mftr(CPUState *env, DisasContext *ctx, int rt, int rd,
                     int u, int sel, int h)
{
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
        tcg_gen_movi_tl(t0, -1);
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
        tcg_gen_movi_tl(t0, -1);
    gen_helper_mftc0_tcstatus(t0);
    gen_helper_mftc0_tcbind(t0);
    gen_helper_mftc0_tcrestart(t0);
    gen_helper_mftc0_tchalt(t0);
    gen_helper_mftc0_tccontext(t0);
    gen_helper_mftc0_tcschedule(t0);
    gen_helper_mftc0_tcschefback(t0);
    gen_mfc0(env, ctx, t0, rt, sel);
    gen_helper_mftc0_entryhi(t0);
    gen_mfc0(env, ctx, t0, rt, sel);
    gen_helper_mftc0_status(t0);
    gen_mfc0(env, ctx, t0, rt, sel);
    gen_helper_mftc0_debug(t0);
    gen_mfc0(env, ctx, t0, rt, sel);
    gen_mfc0(env, ctx, t0, rt, sel);
    } else switch (sel) {
    /* GPR registers. */
    gen_helper_1i(mftgpr, t0, rt);
    /* Auxiliary CPU registers */
    gen_helper_1i(mftlo, t0, 0);
    gen_helper_1i(mfthi, t0, 0);
    gen_helper_1i(mftacx, t0, 0);
    gen_helper_1i(mftlo, t0, 1);
    gen_helper_1i(mfthi, t0, 1);
    gen_helper_1i(mftacx, t0, 1);
    gen_helper_1i(mftlo, t0, 2);
    gen_helper_1i(mfthi, t0, 2);
    gen_helper_1i(mftacx, t0, 2);
    gen_helper_1i(mftlo, t0, 3);
    gen_helper_1i(mfthi, t0, 3);
    gen_helper_1i(mftacx, t0, 3);
    gen_helper_mftdsp(t0);
    /* Floating point (COP1). */
    /* XXX: For now we support only a single FPU context. */
    TCGv_i32 fp0 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, rt);
    tcg_gen_ext_i32_tl(t0, fp0);
    tcg_temp_free_i32(fp0);

    TCGv_i32 fp0 = tcg_temp_new_i32();

    gen_load_fpr32h(fp0, rt);
    tcg_gen_ext_i32_tl(t0, fp0);
    tcg_temp_free_i32(fp0);
    /* XXX: For now we support only a single FPU context. */
    gen_helper_1i(cfc1, t0, rt);
    /* COP2: Not implemented. */
    LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
    gen_store_gpr(t0, rd);

    LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt, u, sel, h);
    generate_exception(ctx, EXCP_RI);
static void gen_mttr(CPUState *env, DisasContext *ctx, int rd, int rt,
                     int u, int sel, int h)
{
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);
    TCGv t0 = tcg_temp_local_new();

    gen_load_gpr(t0, rt);
    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE))))
    else if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
             (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC)))
    gen_helper_mttc0_tcstatus(t0);
    gen_helper_mttc0_tcbind(t0);
    gen_helper_mttc0_tcrestart(t0);
    gen_helper_mttc0_tchalt(t0);
    gen_helper_mttc0_tccontext(t0);
    gen_helper_mttc0_tcschedule(t0);
    gen_helper_mttc0_tcschefback(t0);
    gen_mtc0(env, ctx, t0, rd, sel);
    gen_helper_mttc0_entryhi(t0);
    gen_mtc0(env, ctx, t0, rd, sel);
    gen_helper_mttc0_status(t0);
    gen_mtc0(env, ctx, t0, rd, sel);
    gen_helper_mttc0_debug(t0);
    gen_mtc0(env, ctx, t0, rd, sel);
    gen_mtc0(env, ctx, t0, rd, sel);
    } else switch (sel) {
    /* GPR registers. */
    gen_helper_1i(mttgpr, t0, rd);
    /* Auxiliary CPU registers */
    gen_helper_1i(mttlo, t0, 0);
    gen_helper_1i(mtthi, t0, 0);
    gen_helper_1i(mttacx, t0, 0);
    gen_helper_1i(mttlo, t0, 1);
    gen_helper_1i(mtthi, t0, 1);
    gen_helper_1i(mttacx, t0, 1);
    gen_helper_1i(mttlo, t0, 2);
    gen_helper_1i(mtthi, t0, 2);
    gen_helper_1i(mttacx, t0, 2);
    gen_helper_1i(mttlo, t0, 3);
    gen_helper_1i(mtthi, t0, 3);
    gen_helper_1i(mttacx, t0, 3);
    gen_helper_mttdsp(t0);
    /* Floating point (COP1). */
    /* XXX: For now we support only a single FPU context. */
    TCGv_i32 fp0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(fp0, t0);
    gen_store_fpr32(fp0, rd);
    tcg_temp_free_i32(fp0);

    TCGv_i32 fp0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(fp0, t0);
    gen_store_fpr32h(fp0, rd);
    tcg_temp_free_i32(fp0);
    /* XXX: For now we support only a single FPU context. */
    gen_helper_1i(ctc1, t0, rd);
    /* COP2: Not implemented. */
    LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);

    LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd, u, sel, h);
    generate_exception(ctx, EXCP_RI);
static void gen_cp0 (CPUState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)
{
    const char *opn = "ldst";

    gen_mfc0(env, ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);

    TCGv t0 = tcg_temp_new();

    gen_load_gpr(t0, rt);
    gen_mtc0(env, ctx, t0, rd, ctx->opcode & 0x7);

#if defined(TARGET_MIPS64)
    check_insn(env, ctx, ISA_MIPS3);
    gen_dmfc0(env, ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);

    check_insn(env, ctx, ISA_MIPS3);

    TCGv t0 = tcg_temp_new();

    gen_load_gpr(t0, rt);
    gen_dmtc0(env, ctx, t0, rd, ctx->opcode & 0x7);
#endif

    check_insn(env, ctx, ASE_MT);
    gen_mftr(env, ctx, rt, rd, (ctx->opcode >> 5) & 1,
             ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
    check_insn(env, ctx, ASE_MT);
    gen_mttr(env, ctx, rd, rt, (ctx->opcode >> 5) & 1,
             ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);
    if (!env->tlb->helper_tlbwi)
    if (!env->tlb->helper_tlbwr)
    if (!env->tlb->helper_tlbp)
    if (!env->tlb->helper_tlbr)
    check_insn(env, ctx, ISA_MIPS2);
    ctx->bstate = BS_EXCP;
    check_insn(env, ctx, ISA_MIPS32);
    if (!(ctx->hflags & MIPS_HFLAG_DM)) {
        generate_exception(ctx, EXCP_RI);
    }
    ctx->bstate = BS_EXCP;
    check_insn(env, ctx, ISA_MIPS3 | ISA_MIPS32);
    /* If we get an exception, we want to restart at next instruction */
    save_cpu_state(ctx, 1);
    ctx->bstate = BS_EXCP;
    generate_exception(ctx, EXCP_RI);
    MIPS_DEBUG("%s %s %d", opn, regnames[rt], rd);
#endif /* !CONFIG_USER_ONLY */
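/* gen_cp0 is the COP0 dispatcher: the MFC0/MTC0 style moves (including the
   64-bit variants and the MT ASE MFTR/MTTR forms) route to the generators
   above, with the CP0 selector taken from the low three bits of the opcode
   and the u/h fields from bits 5 and 4.  TLB operations are only emitted
   when the CPU model provides the corresponding helper, and the remaining
   privileged operations here (return-from-exception and wait style ops)
   end the translation block with BS_EXCP because they change the
   execution state. */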
/* CP1 Branches (before delay slot) */
static void gen_compute_branch1 (CPUState *env, DisasContext *ctx, uint32_t op,
                                 int32_t cc, int32_t offset)
{
    target_ulong btarget;
    const char *opn = "cp1 cond branch";
    TCGv_i32 t0 = tcg_temp_new_i32();

    check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);

    btarget = ctx->pc + 4 + offset;

    tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
    tcg_gen_not_i32(t0, t0);
    tcg_gen_andi_i32(t0, t0, 1);
    tcg_gen_extu_i32_tl(bcond, t0);

    tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
    tcg_gen_not_i32(t0, t0);
    tcg_gen_andi_i32(t0, t0, 1);
    tcg_gen_extu_i32_tl(bcond, t0);

    tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
    tcg_gen_andi_i32(t0, t0, 1);
    tcg_gen_extu_i32_tl(bcond, t0);

    tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
    tcg_gen_andi_i32(t0, t0, 1);
    tcg_gen_extu_i32_tl(bcond, t0);
    ctx->hflags |= MIPS_HFLAG_BL;

    TCGv_i32 t1 = tcg_temp_new_i32();
    tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
    tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
    tcg_gen_nor_i32(t0, t0, t1);
    tcg_temp_free_i32(t1);
    tcg_gen_andi_i32(t0, t0, 1);
    tcg_gen_extu_i32_tl(bcond, t0);

    TCGv_i32 t1 = tcg_temp_new_i32();
    tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
    tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
    tcg_gen_or_i32(t0, t0, t1);
    tcg_temp_free_i32(t1);
    tcg_gen_andi_i32(t0, t0, 1);
    tcg_gen_extu_i32_tl(bcond, t0);

    TCGv_i32 t1 = tcg_temp_new_i32();
    tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
    tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
    tcg_gen_or_i32(t0, t0, t1);
    tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
    tcg_gen_or_i32(t0, t0, t1);
    tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
    tcg_gen_nor_i32(t0, t0, t1);
    tcg_temp_free_i32(t1);
    tcg_gen_andi_i32(t0, t0, 1);
    tcg_gen_extu_i32_tl(bcond, t0);

    TCGv_i32 t1 = tcg_temp_new_i32();
    tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc));
    tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+1));
    tcg_gen_or_i32(t0, t0, t1);
    tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+2));
    tcg_gen_or_i32(t0, t0, t1);
    tcg_gen_shri_i32(t1, fpu_fcr31, get_fp_bit(cc+3));
    tcg_gen_or_i32(t0, t0, t1);
    tcg_temp_free_i32(t1);
    tcg_gen_andi_i32(t0, t0, 1);
    tcg_gen_extu_i32_tl(bcond, t0);

    ctx->hflags |= MIPS_HFLAG_BC;
    generate_exception (ctx, EXCP_RI);
    MIPS_DEBUG("%s: cond %02x target " TARGET_FMT_lx, opn,
               ctx->hflags, btarget);
    ctx->btarget = btarget;

    tcg_temp_free_i32(t0);
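/* FP branches test condition-code bits held in fpu_fcr31: the bit (or the
   group of bits for the paired-single "any2"/"any4" forms) selected by
   get_fp_bit(cc) is shifted down, combined with OR/NOR according to the
   true/false sense of the branch, masked to a single bit and widened into
   bcond.  The "likely" variants also set MIPS_HFLAG_BL so the delay slot
   is nullified when the branch is not taken. */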
/* Coprocessor 1 (FPU) */

#define FOP(func, fmt) (((fmt) << 21) | (func))

enum fopcode {
    OPC_ADD_S = FOP(0, FMT_S),
    OPC_SUB_S = FOP(1, FMT_S),
    OPC_MUL_S = FOP(2, FMT_S),
    OPC_DIV_S = FOP(3, FMT_S),
    OPC_SQRT_S = FOP(4, FMT_S),
    OPC_ABS_S = FOP(5, FMT_S),
    OPC_MOV_S = FOP(6, FMT_S),
    OPC_NEG_S = FOP(7, FMT_S),
    OPC_ROUND_L_S = FOP(8, FMT_S),
    OPC_TRUNC_L_S = FOP(9, FMT_S),
    OPC_CEIL_L_S = FOP(10, FMT_S),
    OPC_FLOOR_L_S = FOP(11, FMT_S),
    OPC_ROUND_W_S = FOP(12, FMT_S),
    OPC_TRUNC_W_S = FOP(13, FMT_S),
    OPC_CEIL_W_S = FOP(14, FMT_S),
    OPC_FLOOR_W_S = FOP(15, FMT_S),
    OPC_MOVCF_S = FOP(17, FMT_S),
    OPC_MOVZ_S = FOP(18, FMT_S),
    OPC_MOVN_S = FOP(19, FMT_S),
    OPC_RECIP_S = FOP(21, FMT_S),
    OPC_RSQRT_S = FOP(22, FMT_S),
    OPC_RECIP2_S = FOP(28, FMT_S),
    OPC_RECIP1_S = FOP(29, FMT_S),
    OPC_RSQRT1_S = FOP(30, FMT_S),
    OPC_RSQRT2_S = FOP(31, FMT_S),
    OPC_CVT_D_S = FOP(33, FMT_S),
    OPC_CVT_W_S = FOP(36, FMT_S),
    OPC_CVT_L_S = FOP(37, FMT_S),
    OPC_CVT_PS_S = FOP(38, FMT_S),
    OPC_CMP_F_S = FOP (48, FMT_S),
    OPC_CMP_UN_S = FOP (49, FMT_S),
    OPC_CMP_EQ_S = FOP (50, FMT_S),
    OPC_CMP_UEQ_S = FOP (51, FMT_S),
    OPC_CMP_OLT_S = FOP (52, FMT_S),
    OPC_CMP_ULT_S = FOP (53, FMT_S),
    OPC_CMP_OLE_S = FOP (54, FMT_S),
    OPC_CMP_ULE_S = FOP (55, FMT_S),
    OPC_CMP_SF_S = FOP (56, FMT_S),
    OPC_CMP_NGLE_S = FOP (57, FMT_S),
    OPC_CMP_SEQ_S = FOP (58, FMT_S),
    OPC_CMP_NGL_S = FOP (59, FMT_S),
    OPC_CMP_LT_S = FOP (60, FMT_S),
    OPC_CMP_NGE_S = FOP (61, FMT_S),
    OPC_CMP_LE_S = FOP (62, FMT_S),
    OPC_CMP_NGT_S = FOP (63, FMT_S),

    OPC_ADD_D = FOP(0, FMT_D),
    OPC_SUB_D = FOP(1, FMT_D),
    OPC_MUL_D = FOP(2, FMT_D),
    OPC_DIV_D = FOP(3, FMT_D),
    OPC_SQRT_D = FOP(4, FMT_D),
    OPC_ABS_D = FOP(5, FMT_D),
    OPC_MOV_D = FOP(6, FMT_D),
    OPC_NEG_D = FOP(7, FMT_D),
    OPC_ROUND_L_D = FOP(8, FMT_D),
    OPC_TRUNC_L_D = FOP(9, FMT_D),
    OPC_CEIL_L_D = FOP(10, FMT_D),
    OPC_FLOOR_L_D = FOP(11, FMT_D),
    OPC_ROUND_W_D = FOP(12, FMT_D),
    OPC_TRUNC_W_D = FOP(13, FMT_D),
    OPC_CEIL_W_D = FOP(14, FMT_D),
    OPC_FLOOR_W_D = FOP(15, FMT_D),
    OPC_MOVCF_D = FOP(17, FMT_D),
    OPC_MOVZ_D = FOP(18, FMT_D),
    OPC_MOVN_D = FOP(19, FMT_D),
    OPC_RECIP_D = FOP(21, FMT_D),
    OPC_RSQRT_D = FOP(22, FMT_D),
    OPC_RECIP2_D = FOP(28, FMT_D),
    OPC_RECIP1_D = FOP(29, FMT_D),
    OPC_RSQRT1_D = FOP(30, FMT_D),
    OPC_RSQRT2_D = FOP(31, FMT_D),
    OPC_CVT_S_D = FOP(32, FMT_D),
    OPC_CVT_W_D = FOP(36, FMT_D),
    OPC_CVT_L_D = FOP(37, FMT_D),
    OPC_CMP_F_D = FOP (48, FMT_D),
    OPC_CMP_UN_D = FOP (49, FMT_D),
    OPC_CMP_EQ_D = FOP (50, FMT_D),
    OPC_CMP_UEQ_D = FOP (51, FMT_D),
    OPC_CMP_OLT_D = FOP (52, FMT_D),
    OPC_CMP_ULT_D = FOP (53, FMT_D),
    OPC_CMP_OLE_D = FOP (54, FMT_D),
    OPC_CMP_ULE_D = FOP (55, FMT_D),
    OPC_CMP_SF_D = FOP (56, FMT_D),
    OPC_CMP_NGLE_D = FOP (57, FMT_D),
    OPC_CMP_SEQ_D = FOP (58, FMT_D),
    OPC_CMP_NGL_D = FOP (59, FMT_D),
    OPC_CMP_LT_D = FOP (60, FMT_D),
    OPC_CMP_NGE_D = FOP (61, FMT_D),
    OPC_CMP_LE_D = FOP (62, FMT_D),
    OPC_CMP_NGT_D = FOP (63, FMT_D),

    OPC_CVT_S_W = FOP(32, FMT_W),
    OPC_CVT_D_W = FOP(33, FMT_W),
    OPC_CVT_S_L = FOP(32, FMT_L),
    OPC_CVT_D_L = FOP(33, FMT_L),
    OPC_CVT_PS_PW = FOP(38, FMT_W),

    OPC_ADD_PS = FOP(0, FMT_PS),
    OPC_SUB_PS = FOP(1, FMT_PS),
    OPC_MUL_PS = FOP(2, FMT_PS),
    OPC_DIV_PS = FOP(3, FMT_PS),
    OPC_ABS_PS = FOP(5, FMT_PS),
    OPC_MOV_PS = FOP(6, FMT_PS),
    OPC_NEG_PS = FOP(7, FMT_PS),
    OPC_MOVCF_PS = FOP(17, FMT_PS),
    OPC_MOVZ_PS = FOP(18, FMT_PS),
    OPC_MOVN_PS = FOP(19, FMT_PS),
    OPC_ADDR_PS = FOP(24, FMT_PS),
    OPC_MULR_PS = FOP(26, FMT_PS),
    OPC_RECIP2_PS = FOP(28, FMT_PS),
    OPC_RECIP1_PS = FOP(29, FMT_PS),
    OPC_RSQRT1_PS = FOP(30, FMT_PS),
    OPC_RSQRT2_PS = FOP(31, FMT_PS),

    OPC_CVT_S_PU = FOP(32, FMT_PS),
    OPC_CVT_PW_PS = FOP(36, FMT_PS),
    OPC_CVT_S_PL = FOP(40, FMT_PS),
    OPC_PLL_PS = FOP(44, FMT_PS),
    OPC_PLU_PS = FOP(45, FMT_PS),
    OPC_PUL_PS = FOP(46, FMT_PS),
    OPC_PUU_PS = FOP(47, FMT_PS),
    OPC_CMP_F_PS = FOP (48, FMT_PS),
    OPC_CMP_UN_PS = FOP (49, FMT_PS),
    OPC_CMP_EQ_PS = FOP (50, FMT_PS),
    OPC_CMP_UEQ_PS = FOP (51, FMT_PS),
    OPC_CMP_OLT_PS = FOP (52, FMT_PS),
    OPC_CMP_ULT_PS = FOP (53, FMT_PS),
    OPC_CMP_OLE_PS = FOP (54, FMT_PS),
    OPC_CMP_ULE_PS = FOP (55, FMT_PS),
    OPC_CMP_SF_PS = FOP (56, FMT_PS),
    OPC_CMP_NGLE_PS = FOP (57, FMT_PS),
    OPC_CMP_SEQ_PS = FOP (58, FMT_PS),
    OPC_CMP_NGL_PS = FOP (59, FMT_PS),
    OPC_CMP_LT_PS = FOP (60, FMT_PS),
    OPC_CMP_NGE_PS = FOP (61, FMT_PS),
    OPC_CMP_LE_PS = FOP (62, FMT_PS),
    OPC_CMP_NGT_PS = FOP (63, FMT_PS),
};
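/* FOP() packs the fmt field (bits 25..21 of the COP1 major opcode) and the
   function field (bits 5..0) into a single value, so e.g. FOP(0, FMT_S)
   selects ADD.S and FOP(63, FMT_D) selects C.NGT.D.  gen_farith below
   switches on exactly this packed value. */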
static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
{
    const char *opn = "cp1 move";
    TCGv t0 = tcg_temp_new();

    TCGv_i32 fp0 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    tcg_gen_ext_i32_tl(t0, fp0);
    tcg_temp_free_i32(fp0);
    gen_store_gpr(t0, rt);

    gen_load_gpr(t0, rt);
    TCGv_i32 fp0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(fp0, t0);
    gen_store_fpr32(fp0, fs);
    tcg_temp_free_i32(fp0);

    gen_helper_1i(cfc1, t0, fs);
    gen_store_gpr(t0, rt);

    gen_load_gpr(t0, rt);
    gen_helper_1i(ctc1, t0, fs);
#if defined(TARGET_MIPS64)
    gen_load_fpr64(ctx, t0, fs);
    gen_store_gpr(t0, rt);

    gen_load_gpr(t0, rt);
    gen_store_fpr64(ctx, t0, fs);
#endif

    TCGv_i32 fp0 = tcg_temp_new_i32();

    gen_load_fpr32h(fp0, fs);
    tcg_gen_ext_i32_tl(t0, fp0);
    tcg_temp_free_i32(fp0);
    gen_store_gpr(t0, rt);

    gen_load_gpr(t0, rt);
    TCGv_i32 fp0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(fp0, t0);
    gen_store_fpr32h(fp0, fs);
    tcg_temp_free_i32(fp0);

    generate_exception (ctx, EXCP_RI);

    MIPS_DEBUG("%s %s %s", opn, regnames[rt], fregnames[fs]);
static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
{
    l1 = gen_new_label();
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    tcg_gen_movi_tl(cpu_gpr[rd], 0);
    tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);

static inline void gen_movcf_s (int fs, int fd, int cc, int tf)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    int l1 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(t0, fs);
    gen_store_fpr32(t0, fd);
    tcg_temp_free_i32(t0);

static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    int l1 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);

static inline void gen_movcf_ps (int fs, int fd, int cc, int tf)
{
    TCGv_i32 t0 = tcg_temp_new_i32();
    int l1 = gen_new_label();
    int l2 = gen_new_label();

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(t0, fs);
    gen_store_fpr32(t0, fd);
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc+1));
    tcg_gen_brcondi_i32(cond, t0, 0, l2);
    gen_load_fpr32h(t0, fs);
    gen_store_fpr32h(t0, fd);
    tcg_temp_free_i32(t0);
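/* The conditional-move helpers above all use the same branch-over idiom:
   test the selected condition bit in fpu_fcr31 with tcg_gen_andi_i32,
   branch past the move when the bit does not match the requested tf sense,
   and fall through to copy the source register otherwise.  gen_movcf_ps
   does this twice, once per 32-bit half, using condition bits cc and
   cc+1. */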
static void gen_farith (DisasContext *ctx, enum fopcode op1,
                        int ft, int fs, int fd, int cc)
{
    const char *opn = "farith";
    const char *condnames[] = {
    const char *condnames_abs[] = {
    enum { BINOP, CMPOP, OTHEROP } optype = OTHEROP;
    uint32_t func = ctx->opcode & 0x3f;

    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_load_fpr32(fp1, ft);
    gen_helper_float_add_s(fp0, fp0, fp1);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_load_fpr32(fp1, ft);
    gen_helper_float_sub_s(fp0, fp0, fp1);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_load_fpr32(fp1, ft);
    gen_helper_float_mul_s(fp0, fp0, fp1);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_load_fpr32(fp1, ft);
    gen_helper_float_div_s(fp0, fp0, fp1);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    TCGv_i32 fp0 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_helper_float_sqrt_s(fp0, fp0);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    TCGv_i32 fp0 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_helper_float_abs_s(fp0, fp0);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    TCGv_i32 fp0 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    TCGv_i32 fp0 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_helper_float_chs_s(fp0, fp0);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    check_cp1_64bitmode(ctx);

    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();

    gen_load_fpr32(fp32, fs);
    gen_helper_float_roundl_s(fp64, fp32);
    tcg_temp_free_i32(fp32);
    gen_store_fpr64(ctx, fp64, fd);
    tcg_temp_free_i64(fp64);

    check_cp1_64bitmode(ctx);

    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();

    gen_load_fpr32(fp32, fs);
    gen_helper_float_truncl_s(fp64, fp32);
    tcg_temp_free_i32(fp32);
    gen_store_fpr64(ctx, fp64, fd);
    tcg_temp_free_i64(fp64);

    check_cp1_64bitmode(ctx);

    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();

    gen_load_fpr32(fp32, fs);
    gen_helper_float_ceill_s(fp64, fp32);
    tcg_temp_free_i32(fp32);
    gen_store_fpr64(ctx, fp64, fd);
    tcg_temp_free_i64(fp64);

    check_cp1_64bitmode(ctx);

    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();

    gen_load_fpr32(fp32, fs);
    gen_helper_float_floorl_s(fp64, fp32);
    tcg_temp_free_i32(fp32);
    gen_store_fpr64(ctx, fp64, fd);
    tcg_temp_free_i64(fp64);

    TCGv_i32 fp0 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_helper_float_roundw_s(fp0, fp0);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    TCGv_i32 fp0 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_helper_float_truncw_s(fp0, fp0);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    TCGv_i32 fp0 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_helper_float_ceilw_s(fp0, fp0);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    TCGv_i32 fp0 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_helper_float_floorw_s(fp0, fp0);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    gen_movcf_s(fs, fd, (ft >> 2) & 0x7, ft & 0x1);

    int l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[ft], 0, l1);
    fp0 = tcg_temp_new_i32();
    gen_load_fpr32(fp0, fs);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    int l1 = gen_new_label();

    tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[ft], 0, l1);
    fp0 = tcg_temp_new_i32();
    gen_load_fpr32(fp0, fs);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    TCGv_i32 fp0 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_helper_float_recip_s(fp0, fp0);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    TCGv_i32 fp0 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_helper_float_rsqrt_s(fp0, fp0);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    check_cp1_64bitmode(ctx);

    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_load_fpr32(fp1, fd);
    gen_helper_float_recip2_s(fp0, fp0, fp1);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    check_cp1_64bitmode(ctx);

    TCGv_i32 fp0 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_helper_float_recip1_s(fp0, fp0);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    check_cp1_64bitmode(ctx);

    TCGv_i32 fp0 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_helper_float_rsqrt1_s(fp0, fp0);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    check_cp1_64bitmode(ctx);

    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();

    gen_load_fpr32(fp0, fs);
    gen_load_fpr32(fp1, ft);
    gen_helper_float_rsqrt2_s(fp0, fp0, fp1);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(fp0, fd);
    tcg_temp_free_i32(fp0);

    check_cp1_registers(ctx, fd);

    TCGv_i32 fp32 = tcg_temp_new_i32();
    TCGv_i64 fp64 = tcg_temp_new_i64();

    gen_load_fpr32(fp32
, fs
);
6482 gen_helper_float_cvtd_s(fp64
, fp32
);
6483 tcg_temp_free_i32(fp32
);
6484 gen_store_fpr64(ctx
, fp64
, fd
);
6485 tcg_temp_free_i64(fp64
);
6491 TCGv_i32 fp0
= tcg_temp_new_i32();
6493 gen_load_fpr32(fp0
, fs
);
6494 gen_helper_float_cvtw_s(fp0
, fp0
);
6495 gen_store_fpr32(fp0
, fd
);
6496 tcg_temp_free_i32(fp0
);
6501 check_cp1_64bitmode(ctx
);
6503 TCGv_i32 fp32
= tcg_temp_new_i32();
6504 TCGv_i64 fp64
= tcg_temp_new_i64();
6506 gen_load_fpr32(fp32
, fs
);
6507 gen_helper_float_cvtl_s(fp64
, fp32
);
6508 tcg_temp_free_i32(fp32
);
6509 gen_store_fpr64(ctx
, fp64
, fd
);
6510 tcg_temp_free_i64(fp64
);
6515 check_cp1_64bitmode(ctx
);
6517 TCGv_i64 fp64
= tcg_temp_new_i64();
6518 TCGv_i32 fp32_0
= tcg_temp_new_i32();
6519 TCGv_i32 fp32_1
= tcg_temp_new_i32();
6521 gen_load_fpr32(fp32_0
, fs
);
6522 gen_load_fpr32(fp32_1
, ft
);
6523 tcg_gen_concat_i32_i64(fp64
, fp32_0
, fp32_1
);
6524 tcg_temp_free_i32(fp32_1
);
6525 tcg_temp_free_i32(fp32_0
);
6526 gen_store_fpr64(ctx
, fp64
, fd
);
6527 tcg_temp_free_i64(fp64
);
    case OPC_CMP_NGLE_S:
        if (ctx->opcode & (1 << 6)) {
            gen_cmpabs_s(ctx, func-48, ft, fs, cc);
            opn = condnames_abs[func-48];
        } else {
            gen_cmp_s(ctx, func-48, ft, fs, cc);
            opn = condnames[func-48];
        }
        break;
6556 check_cp1_registers(ctx
, fs
| ft
| fd
);
6558 TCGv_i64 fp0
= tcg_temp_new_i64();
6559 TCGv_i64 fp1
= tcg_temp_new_i64();
6561 gen_load_fpr64(ctx
, fp0
, fs
);
6562 gen_load_fpr64(ctx
, fp1
, ft
);
6563 gen_helper_float_add_d(fp0
, fp0
, fp1
);
6564 tcg_temp_free_i64(fp1
);
6565 gen_store_fpr64(ctx
, fp0
, fd
);
6566 tcg_temp_free_i64(fp0
);
6572 check_cp1_registers(ctx
, fs
| ft
| fd
);
6574 TCGv_i64 fp0
= tcg_temp_new_i64();
6575 TCGv_i64 fp1
= tcg_temp_new_i64();
6577 gen_load_fpr64(ctx
, fp0
, fs
);
6578 gen_load_fpr64(ctx
, fp1
, ft
);
6579 gen_helper_float_sub_d(fp0
, fp0
, fp1
);
6580 tcg_temp_free_i64(fp1
);
6581 gen_store_fpr64(ctx
, fp0
, fd
);
6582 tcg_temp_free_i64(fp0
);
6588 check_cp1_registers(ctx
, fs
| ft
| fd
);
6590 TCGv_i64 fp0
= tcg_temp_new_i64();
6591 TCGv_i64 fp1
= tcg_temp_new_i64();
6593 gen_load_fpr64(ctx
, fp0
, fs
);
6594 gen_load_fpr64(ctx
, fp1
, ft
);
6595 gen_helper_float_mul_d(fp0
, fp0
, fp1
);
6596 tcg_temp_free_i64(fp1
);
6597 gen_store_fpr64(ctx
, fp0
, fd
);
6598 tcg_temp_free_i64(fp0
);
6604 check_cp1_registers(ctx
, fs
| ft
| fd
);
6606 TCGv_i64 fp0
= tcg_temp_new_i64();
6607 TCGv_i64 fp1
= tcg_temp_new_i64();
6609 gen_load_fpr64(ctx
, fp0
, fs
);
6610 gen_load_fpr64(ctx
, fp1
, ft
);
6611 gen_helper_float_div_d(fp0
, fp0
, fp1
);
6612 tcg_temp_free_i64(fp1
);
6613 gen_store_fpr64(ctx
, fp0
, fd
);
6614 tcg_temp_free_i64(fp0
);
6620 check_cp1_registers(ctx
, fs
| fd
);
6622 TCGv_i64 fp0
= tcg_temp_new_i64();
6624 gen_load_fpr64(ctx
, fp0
, fs
);
6625 gen_helper_float_sqrt_d(fp0
, fp0
);
6626 gen_store_fpr64(ctx
, fp0
, fd
);
6627 tcg_temp_free_i64(fp0
);
6632 check_cp1_registers(ctx
, fs
| fd
);
6634 TCGv_i64 fp0
= tcg_temp_new_i64();
6636 gen_load_fpr64(ctx
, fp0
, fs
);
6637 gen_helper_float_abs_d(fp0
, fp0
);
6638 gen_store_fpr64(ctx
, fp0
, fd
);
6639 tcg_temp_free_i64(fp0
);
6644 check_cp1_registers(ctx
, fs
| fd
);
6646 TCGv_i64 fp0
= tcg_temp_new_i64();
6648 gen_load_fpr64(ctx
, fp0
, fs
);
6649 gen_store_fpr64(ctx
, fp0
, fd
);
6650 tcg_temp_free_i64(fp0
);
6655 check_cp1_registers(ctx
, fs
| fd
);
6657 TCGv_i64 fp0
= tcg_temp_new_i64();
6659 gen_load_fpr64(ctx
, fp0
, fs
);
6660 gen_helper_float_chs_d(fp0
, fp0
);
6661 gen_store_fpr64(ctx
, fp0
, fd
);
6662 tcg_temp_free_i64(fp0
);
6667 check_cp1_64bitmode(ctx
);
6669 TCGv_i64 fp0
= tcg_temp_new_i64();
6671 gen_load_fpr64(ctx
, fp0
, fs
);
6672 gen_helper_float_roundl_d(fp0
, fp0
);
6673 gen_store_fpr64(ctx
, fp0
, fd
);
6674 tcg_temp_free_i64(fp0
);
6679 check_cp1_64bitmode(ctx
);
6681 TCGv_i64 fp0
= tcg_temp_new_i64();
6683 gen_load_fpr64(ctx
, fp0
, fs
);
6684 gen_helper_float_truncl_d(fp0
, fp0
);
6685 gen_store_fpr64(ctx
, fp0
, fd
);
6686 tcg_temp_free_i64(fp0
);
6691 check_cp1_64bitmode(ctx
);
6693 TCGv_i64 fp0
= tcg_temp_new_i64();
6695 gen_load_fpr64(ctx
, fp0
, fs
);
6696 gen_helper_float_ceill_d(fp0
, fp0
);
6697 gen_store_fpr64(ctx
, fp0
, fd
);
6698 tcg_temp_free_i64(fp0
);
6703 check_cp1_64bitmode(ctx
);
6705 TCGv_i64 fp0
= tcg_temp_new_i64();
6707 gen_load_fpr64(ctx
, fp0
, fs
);
6708 gen_helper_float_floorl_d(fp0
, fp0
);
6709 gen_store_fpr64(ctx
, fp0
, fd
);
6710 tcg_temp_free_i64(fp0
);
6715 check_cp1_registers(ctx
, fs
);
6717 TCGv_i32 fp32
= tcg_temp_new_i32();
6718 TCGv_i64 fp64
= tcg_temp_new_i64();
6720 gen_load_fpr64(ctx
, fp64
, fs
);
6721 gen_helper_float_roundw_d(fp32
, fp64
);
6722 tcg_temp_free_i64(fp64
);
6723 gen_store_fpr32(fp32
, fd
);
6724 tcg_temp_free_i32(fp32
);
6729 check_cp1_registers(ctx
, fs
);
6731 TCGv_i32 fp32
= tcg_temp_new_i32();
6732 TCGv_i64 fp64
= tcg_temp_new_i64();
6734 gen_load_fpr64(ctx
, fp64
, fs
);
6735 gen_helper_float_truncw_d(fp32
, fp64
);
6736 tcg_temp_free_i64(fp64
);
6737 gen_store_fpr32(fp32
, fd
);
6738 tcg_temp_free_i32(fp32
);
6743 check_cp1_registers(ctx
, fs
);
6745 TCGv_i32 fp32
= tcg_temp_new_i32();
6746 TCGv_i64 fp64
= tcg_temp_new_i64();
6748 gen_load_fpr64(ctx
, fp64
, fs
);
6749 gen_helper_float_ceilw_d(fp32
, fp64
);
6750 tcg_temp_free_i64(fp64
);
6751 gen_store_fpr32(fp32
, fd
);
6752 tcg_temp_free_i32(fp32
);
6757 check_cp1_registers(ctx
, fs
);
6759 TCGv_i32 fp32
= tcg_temp_new_i32();
6760 TCGv_i64 fp64
= tcg_temp_new_i64();
6762 gen_load_fpr64(ctx
, fp64
, fs
);
6763 gen_helper_float_floorw_d(fp32
, fp64
);
6764 tcg_temp_free_i64(fp64
);
6765 gen_store_fpr32(fp32
, fd
);
6766 tcg_temp_free_i32(fp32
);
6771 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
6776 int l1
= gen_new_label();
6780 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
6782 fp0
= tcg_temp_new_i64();
6783 gen_load_fpr64(ctx
, fp0
, fs
);
6784 gen_store_fpr64(ctx
, fp0
, fd
);
6785 tcg_temp_free_i64(fp0
);
6792 int l1
= gen_new_label();
6796 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
6797 fp0
= tcg_temp_new_i64();
6798 gen_load_fpr64(ctx
, fp0
, fs
);
6799 gen_store_fpr64(ctx
, fp0
, fd
);
6800 tcg_temp_free_i64(fp0
);
6807 check_cp1_64bitmode(ctx
);
6809 TCGv_i64 fp0
= tcg_temp_new_i64();
6811 gen_load_fpr64(ctx
, fp0
, fs
);
6812 gen_helper_float_recip_d(fp0
, fp0
);
6813 gen_store_fpr64(ctx
, fp0
, fd
);
6814 tcg_temp_free_i64(fp0
);
6819 check_cp1_64bitmode(ctx
);
6821 TCGv_i64 fp0
= tcg_temp_new_i64();
6823 gen_load_fpr64(ctx
, fp0
, fs
);
6824 gen_helper_float_rsqrt_d(fp0
, fp0
);
6825 gen_store_fpr64(ctx
, fp0
, fd
);
6826 tcg_temp_free_i64(fp0
);
6831 check_cp1_64bitmode(ctx
);
6833 TCGv_i64 fp0
= tcg_temp_new_i64();
6834 TCGv_i64 fp1
= tcg_temp_new_i64();
6836 gen_load_fpr64(ctx
, fp0
, fs
);
6837 gen_load_fpr64(ctx
, fp1
, ft
);
6838 gen_helper_float_recip2_d(fp0
, fp0
, fp1
);
6839 tcg_temp_free_i64(fp1
);
6840 gen_store_fpr64(ctx
, fp0
, fd
);
6841 tcg_temp_free_i64(fp0
);
6846 check_cp1_64bitmode(ctx
);
6848 TCGv_i64 fp0
= tcg_temp_new_i64();
6850 gen_load_fpr64(ctx
, fp0
, fs
);
6851 gen_helper_float_recip1_d(fp0
, fp0
);
6852 gen_store_fpr64(ctx
, fp0
, fd
);
6853 tcg_temp_free_i64(fp0
);
6858 check_cp1_64bitmode(ctx
);
6860 TCGv_i64 fp0
= tcg_temp_new_i64();
6862 gen_load_fpr64(ctx
, fp0
, fs
);
6863 gen_helper_float_rsqrt1_d(fp0
, fp0
);
6864 gen_store_fpr64(ctx
, fp0
, fd
);
6865 tcg_temp_free_i64(fp0
);
6870 check_cp1_64bitmode(ctx
);
6872 TCGv_i64 fp0
= tcg_temp_new_i64();
6873 TCGv_i64 fp1
= tcg_temp_new_i64();
6875 gen_load_fpr64(ctx
, fp0
, fs
);
6876 gen_load_fpr64(ctx
, fp1
, ft
);
6877 gen_helper_float_rsqrt2_d(fp0
, fp0
, fp1
);
6878 tcg_temp_free_i64(fp1
);
6879 gen_store_fpr64(ctx
, fp0
, fd
);
6880 tcg_temp_free_i64(fp0
);
    case OPC_CMP_NGLE_D:
        if (ctx->opcode & (1 << 6)) {
            gen_cmpabs_d(ctx, func-48, ft, fs, cc);
            opn = condnames_abs[func-48];
        } else {
            gen_cmp_d(ctx, func-48, ft, fs, cc);
            opn = condnames[func-48];
        }
        break;
6909 check_cp1_registers(ctx
, fs
);
6911 TCGv_i32 fp32
= tcg_temp_new_i32();
6912 TCGv_i64 fp64
= tcg_temp_new_i64();
6914 gen_load_fpr64(ctx
, fp64
, fs
);
6915 gen_helper_float_cvts_d(fp32
, fp64
);
6916 tcg_temp_free_i64(fp64
);
6917 gen_store_fpr32(fp32
, fd
);
6918 tcg_temp_free_i32(fp32
);
6923 check_cp1_registers(ctx
, fs
);
6925 TCGv_i32 fp32
= tcg_temp_new_i32();
6926 TCGv_i64 fp64
= tcg_temp_new_i64();
6928 gen_load_fpr64(ctx
, fp64
, fs
);
6929 gen_helper_float_cvtw_d(fp32
, fp64
);
6930 tcg_temp_free_i64(fp64
);
6931 gen_store_fpr32(fp32
, fd
);
6932 tcg_temp_free_i32(fp32
);
6937 check_cp1_64bitmode(ctx
);
6939 TCGv_i64 fp0
= tcg_temp_new_i64();
6941 gen_load_fpr64(ctx
, fp0
, fs
);
6942 gen_helper_float_cvtl_d(fp0
, fp0
);
6943 gen_store_fpr64(ctx
, fp0
, fd
);
6944 tcg_temp_free_i64(fp0
);
6950 TCGv_i32 fp0
= tcg_temp_new_i32();
6952 gen_load_fpr32(fp0
, fs
);
6953 gen_helper_float_cvts_w(fp0
, fp0
);
6954 gen_store_fpr32(fp0
, fd
);
6955 tcg_temp_free_i32(fp0
);
6960 check_cp1_registers(ctx
, fd
);
6962 TCGv_i32 fp32
= tcg_temp_new_i32();
6963 TCGv_i64 fp64
= tcg_temp_new_i64();
6965 gen_load_fpr32(fp32
, fs
);
6966 gen_helper_float_cvtd_w(fp64
, fp32
);
6967 tcg_temp_free_i32(fp32
);
6968 gen_store_fpr64(ctx
, fp64
, fd
);
6969 tcg_temp_free_i64(fp64
);
6974 check_cp1_64bitmode(ctx
);
6976 TCGv_i32 fp32
= tcg_temp_new_i32();
6977 TCGv_i64 fp64
= tcg_temp_new_i64();
6979 gen_load_fpr64(ctx
, fp64
, fs
);
6980 gen_helper_float_cvts_l(fp32
, fp64
);
6981 tcg_temp_free_i64(fp64
);
6982 gen_store_fpr32(fp32
, fd
);
6983 tcg_temp_free_i32(fp32
);
6988 check_cp1_64bitmode(ctx
);
6990 TCGv_i64 fp0
= tcg_temp_new_i64();
6992 gen_load_fpr64(ctx
, fp0
, fs
);
6993 gen_helper_float_cvtd_l(fp0
, fp0
);
6994 gen_store_fpr64(ctx
, fp0
, fd
);
6995 tcg_temp_free_i64(fp0
);
7000 check_cp1_64bitmode(ctx
);
7002 TCGv_i64 fp0
= tcg_temp_new_i64();
7004 gen_load_fpr64(ctx
, fp0
, fs
);
7005 gen_helper_float_cvtps_pw(fp0
, fp0
);
7006 gen_store_fpr64(ctx
, fp0
, fd
);
7007 tcg_temp_free_i64(fp0
);
7012 check_cp1_64bitmode(ctx
);
7014 TCGv_i64 fp0
= tcg_temp_new_i64();
7015 TCGv_i64 fp1
= tcg_temp_new_i64();
7017 gen_load_fpr64(ctx
, fp0
, fs
);
7018 gen_load_fpr64(ctx
, fp1
, ft
);
7019 gen_helper_float_add_ps(fp0
, fp0
, fp1
);
7020 tcg_temp_free_i64(fp1
);
7021 gen_store_fpr64(ctx
, fp0
, fd
);
7022 tcg_temp_free_i64(fp0
);
7027 check_cp1_64bitmode(ctx
);
7029 TCGv_i64 fp0
= tcg_temp_new_i64();
7030 TCGv_i64 fp1
= tcg_temp_new_i64();
7032 gen_load_fpr64(ctx
, fp0
, fs
);
7033 gen_load_fpr64(ctx
, fp1
, ft
);
7034 gen_helper_float_sub_ps(fp0
, fp0
, fp1
);
7035 tcg_temp_free_i64(fp1
);
7036 gen_store_fpr64(ctx
, fp0
, fd
);
7037 tcg_temp_free_i64(fp0
);
7042 check_cp1_64bitmode(ctx
);
7044 TCGv_i64 fp0
= tcg_temp_new_i64();
7045 TCGv_i64 fp1
= tcg_temp_new_i64();
7047 gen_load_fpr64(ctx
, fp0
, fs
);
7048 gen_load_fpr64(ctx
, fp1
, ft
);
7049 gen_helper_float_mul_ps(fp0
, fp0
, fp1
);
7050 tcg_temp_free_i64(fp1
);
7051 gen_store_fpr64(ctx
, fp0
, fd
);
7052 tcg_temp_free_i64(fp0
);
7057 check_cp1_64bitmode(ctx
);
7059 TCGv_i64 fp0
= tcg_temp_new_i64();
7061 gen_load_fpr64(ctx
, fp0
, fs
);
7062 gen_helper_float_abs_ps(fp0
, fp0
);
7063 gen_store_fpr64(ctx
, fp0
, fd
);
7064 tcg_temp_free_i64(fp0
);
7069 check_cp1_64bitmode(ctx
);
7071 TCGv_i64 fp0
= tcg_temp_new_i64();
7073 gen_load_fpr64(ctx
, fp0
, fs
);
7074 gen_store_fpr64(ctx
, fp0
, fd
);
7075 tcg_temp_free_i64(fp0
);
7080 check_cp1_64bitmode(ctx
);
7082 TCGv_i64 fp0
= tcg_temp_new_i64();
7084 gen_load_fpr64(ctx
, fp0
, fs
);
7085 gen_helper_float_chs_ps(fp0
, fp0
);
7086 gen_store_fpr64(ctx
, fp0
, fd
);
7087 tcg_temp_free_i64(fp0
);
7092 check_cp1_64bitmode(ctx
);
7093 gen_movcf_ps(fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
7097 check_cp1_64bitmode(ctx
);
7099 int l1
= gen_new_label();
7103 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
7104 fp0
= tcg_temp_new_i64();
7105 gen_load_fpr64(ctx
, fp0
, fs
);
7106 gen_store_fpr64(ctx
, fp0
, fd
);
7107 tcg_temp_free_i64(fp0
);
7113 check_cp1_64bitmode(ctx
);
7115 int l1
= gen_new_label();
7119 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
7120 fp0
= tcg_temp_new_i64();
7121 gen_load_fpr64(ctx
, fp0
, fs
);
7122 gen_store_fpr64(ctx
, fp0
, fd
);
7123 tcg_temp_free_i64(fp0
);
7130 check_cp1_64bitmode(ctx
);
7132 TCGv_i64 fp0
= tcg_temp_new_i64();
7133 TCGv_i64 fp1
= tcg_temp_new_i64();
7135 gen_load_fpr64(ctx
, fp0
, ft
);
7136 gen_load_fpr64(ctx
, fp1
, fs
);
7137 gen_helper_float_addr_ps(fp0
, fp0
, fp1
);
7138 tcg_temp_free_i64(fp1
);
7139 gen_store_fpr64(ctx
, fp0
, fd
);
7140 tcg_temp_free_i64(fp0
);
7145 check_cp1_64bitmode(ctx
);
7147 TCGv_i64 fp0
= tcg_temp_new_i64();
7148 TCGv_i64 fp1
= tcg_temp_new_i64();
7150 gen_load_fpr64(ctx
, fp0
, ft
);
7151 gen_load_fpr64(ctx
, fp1
, fs
);
7152 gen_helper_float_mulr_ps(fp0
, fp0
, fp1
);
7153 tcg_temp_free_i64(fp1
);
7154 gen_store_fpr64(ctx
, fp0
, fd
);
7155 tcg_temp_free_i64(fp0
);
7160 check_cp1_64bitmode(ctx
);
7162 TCGv_i64 fp0
= tcg_temp_new_i64();
7163 TCGv_i64 fp1
= tcg_temp_new_i64();
7165 gen_load_fpr64(ctx
, fp0
, fs
);
7166 gen_load_fpr64(ctx
, fp1
, fd
);
7167 gen_helper_float_recip2_ps(fp0
, fp0
, fp1
);
7168 tcg_temp_free_i64(fp1
);
7169 gen_store_fpr64(ctx
, fp0
, fd
);
7170 tcg_temp_free_i64(fp0
);
7175 check_cp1_64bitmode(ctx
);
7177 TCGv_i64 fp0
= tcg_temp_new_i64();
7179 gen_load_fpr64(ctx
, fp0
, fs
);
7180 gen_helper_float_recip1_ps(fp0
, fp0
);
7181 gen_store_fpr64(ctx
, fp0
, fd
);
7182 tcg_temp_free_i64(fp0
);
7187 check_cp1_64bitmode(ctx
);
7189 TCGv_i64 fp0
= tcg_temp_new_i64();
7191 gen_load_fpr64(ctx
, fp0
, fs
);
7192 gen_helper_float_rsqrt1_ps(fp0
, fp0
);
7193 gen_store_fpr64(ctx
, fp0
, fd
);
7194 tcg_temp_free_i64(fp0
);
7199 check_cp1_64bitmode(ctx
);
7201 TCGv_i64 fp0
= tcg_temp_new_i64();
7202 TCGv_i64 fp1
= tcg_temp_new_i64();
7204 gen_load_fpr64(ctx
, fp0
, fs
);
7205 gen_load_fpr64(ctx
, fp1
, ft
);
7206 gen_helper_float_rsqrt2_ps(fp0
, fp0
, fp1
);
7207 tcg_temp_free_i64(fp1
);
7208 gen_store_fpr64(ctx
, fp0
, fd
);
7209 tcg_temp_free_i64(fp0
);
7214 check_cp1_64bitmode(ctx
);
7216 TCGv_i32 fp0
= tcg_temp_new_i32();
7218 gen_load_fpr32h(fp0
, fs
);
7219 gen_helper_float_cvts_pu(fp0
, fp0
);
7220 gen_store_fpr32(fp0
, fd
);
7221 tcg_temp_free_i32(fp0
);
7226 check_cp1_64bitmode(ctx
);
7228 TCGv_i64 fp0
= tcg_temp_new_i64();
7230 gen_load_fpr64(ctx
, fp0
, fs
);
7231 gen_helper_float_cvtpw_ps(fp0
, fp0
);
7232 gen_store_fpr64(ctx
, fp0
, fd
);
7233 tcg_temp_free_i64(fp0
);
7238 check_cp1_64bitmode(ctx
);
7240 TCGv_i32 fp0
= tcg_temp_new_i32();
7242 gen_load_fpr32(fp0
, fs
);
7243 gen_helper_float_cvts_pl(fp0
, fp0
);
7244 gen_store_fpr32(fp0
, fd
);
7245 tcg_temp_free_i32(fp0
);
7250 check_cp1_64bitmode(ctx
);
7252 TCGv_i32 fp0
= tcg_temp_new_i32();
7253 TCGv_i32 fp1
= tcg_temp_new_i32();
7255 gen_load_fpr32(fp0
, fs
);
7256 gen_load_fpr32(fp1
, ft
);
7257 gen_store_fpr32h(fp0
, fd
);
7258 gen_store_fpr32(fp1
, fd
);
7259 tcg_temp_free_i32(fp0
);
7260 tcg_temp_free_i32(fp1
);
7265 check_cp1_64bitmode(ctx
);
7267 TCGv_i32 fp0
= tcg_temp_new_i32();
7268 TCGv_i32 fp1
= tcg_temp_new_i32();
7270 gen_load_fpr32(fp0
, fs
);
7271 gen_load_fpr32h(fp1
, ft
);
7272 gen_store_fpr32(fp1
, fd
);
7273 gen_store_fpr32h(fp0
, fd
);
7274 tcg_temp_free_i32(fp0
);
7275 tcg_temp_free_i32(fp1
);
7280 check_cp1_64bitmode(ctx
);
7282 TCGv_i32 fp0
= tcg_temp_new_i32();
7283 TCGv_i32 fp1
= tcg_temp_new_i32();
7285 gen_load_fpr32h(fp0
, fs
);
7286 gen_load_fpr32(fp1
, ft
);
7287 gen_store_fpr32(fp1
, fd
);
7288 gen_store_fpr32h(fp0
, fd
);
7289 tcg_temp_free_i32(fp0
);
7290 tcg_temp_free_i32(fp1
);
7295 check_cp1_64bitmode(ctx
);
7297 TCGv_i32 fp0
= tcg_temp_new_i32();
7298 TCGv_i32 fp1
= tcg_temp_new_i32();
7300 gen_load_fpr32h(fp0
, fs
);
7301 gen_load_fpr32h(fp1
, ft
);
7302 gen_store_fpr32(fp1
, fd
);
7303 gen_store_fpr32h(fp0
, fd
);
7304 tcg_temp_free_i32(fp0
);
7305 tcg_temp_free_i32(fp1
);
    case OPC_CMP_UEQ_PS:
    case OPC_CMP_OLT_PS:
    case OPC_CMP_ULT_PS:
    case OPC_CMP_OLE_PS:
    case OPC_CMP_ULE_PS:
    case OPC_CMP_NGLE_PS:
    case OPC_CMP_SEQ_PS:
    case OPC_CMP_NGL_PS:
    case OPC_CMP_NGE_PS:
    case OPC_CMP_NGT_PS:
        if (ctx->opcode & (1 << 6)) {
            gen_cmpabs_ps(ctx, func-48, ft, fs, cc);
            opn = condnames_abs[func-48];
        } else {
            gen_cmp_ps(ctx, func-48, ft, fs, cc);
            opn = condnames[func-48];
        }
        break;
    default:
        MIPS_INVAL("farith");
        generate_exception (ctx, EXCP_RI);
        return;
    }
    switch (optype) {
    case BINOP:
        MIPS_DEBUG("%s %s, %s, %s", opn, fregnames[fd], fregnames[fs], fregnames[ft]);
        break;
    case CMPOP:
        MIPS_DEBUG("%s %s,%s", opn, fregnames[fs], fregnames[ft]);
        break;
    default:
        MIPS_DEBUG("%s %s,%s", opn, fregnames[fd], fregnames[fs]);
        break;
    }
}
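/* Indexed FP load/store (LWXC1/LDXC1/LUXC1 and the matching stores):
   the effective address is GPR[base] + GPR[index]. */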
/* Coprocessor 3 (FPU) */
static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
                           int fd, int fs, int base, int index)
{
    const char *opn = "extended float load/store";
    TCGv t0 = tcg_temp_new();

    if (base == 0) {
        gen_load_gpr(t0, index);
    } else if (index == 0) {
        gen_load_gpr(t0, base);
    } else {
        gen_load_gpr(t0, index);
        gen_op_addr_add(ctx, t0, cpu_gpr[base], t0);
    }
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */
    save_cpu_state(ctx, 0);
7374 TCGv_i32 fp0
= tcg_temp_new_i32();
7376 tcg_gen_qemu_ld32s(t0
, t0
, ctx
->mem_idx
);
7377 tcg_gen_trunc_tl_i32(fp0
, t0
);
7378 gen_store_fpr32(fp0
, fd
);
7379 tcg_temp_free_i32(fp0
);
7385 check_cp1_registers(ctx
, fd
);
7387 TCGv_i64 fp0
= tcg_temp_new_i64();
7389 tcg_gen_qemu_ld64(fp0
, t0
, ctx
->mem_idx
);
7390 gen_store_fpr64(ctx
, fp0
, fd
);
7391 tcg_temp_free_i64(fp0
);
7396 check_cp1_64bitmode(ctx
);
7397 tcg_gen_andi_tl(t0
, t0
, ~0x7);
7399 TCGv_i64 fp0
= tcg_temp_new_i64();
7401 tcg_gen_qemu_ld64(fp0
, t0
, ctx
->mem_idx
);
7402 gen_store_fpr64(ctx
, fp0
, fd
);
7403 tcg_temp_free_i64(fp0
);
7410 TCGv_i32 fp0
= tcg_temp_new_i32();
7411 TCGv t1
= tcg_temp_new();
7413 gen_load_fpr32(fp0
, fs
);
7414 tcg_gen_extu_i32_tl(t1
, fp0
);
7415 tcg_gen_qemu_st32(t1
, t0
, ctx
->mem_idx
);
7416 tcg_temp_free_i32(fp0
);
7424 check_cp1_registers(ctx
, fs
);
7426 TCGv_i64 fp0
= tcg_temp_new_i64();
7428 gen_load_fpr64(ctx
, fp0
, fs
);
7429 tcg_gen_qemu_st64(fp0
, t0
, ctx
->mem_idx
);
7430 tcg_temp_free_i64(fp0
);
7436 check_cp1_64bitmode(ctx
);
7437 tcg_gen_andi_tl(t0
, t0
, ~0x7);
7439 TCGv_i64 fp0
= tcg_temp_new_i64();
7441 gen_load_fpr64(ctx
, fp0
, fs
);
7442 tcg_gen_qemu_st64(fp0
, t0
, ctx
->mem_idx
);
7443 tcg_temp_free_i64(fp0
);
    MIPS_DEBUG("%s %s, %s(%s)", opn, fregnames[store ? fs : fd],
               regnames[index], regnames[base]);
7454 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
7455 int fd
, int fr
, int fs
, int ft
)
7457 const char *opn
= "flt3_arith";
7461 check_cp1_64bitmode(ctx
);
7463 TCGv t0
= tcg_temp_local_new();
7464 TCGv_i32 fp
= tcg_temp_new_i32();
7465 TCGv_i32 fph
= tcg_temp_new_i32();
7466 int l1
= gen_new_label();
7467 int l2
= gen_new_label();
7469 gen_load_gpr(t0
, fr
);
7470 tcg_gen_andi_tl(t0
, t0
, 0x7);
7472 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
7473 gen_load_fpr32(fp
, fs
);
7474 gen_load_fpr32h(fph
, fs
);
7475 gen_store_fpr32(fp
, fd
);
7476 gen_store_fpr32h(fph
, fd
);
7479 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
7481 #ifdef TARGET_WORDS_BIGENDIAN
7482 gen_load_fpr32(fp
, fs
);
7483 gen_load_fpr32h(fph
, ft
);
7484 gen_store_fpr32h(fp
, fd
);
7485 gen_store_fpr32(fph
, fd
);
7487 gen_load_fpr32h(fph
, fs
);
7488 gen_load_fpr32(fp
, ft
);
7489 gen_store_fpr32(fph
, fd
);
7490 gen_store_fpr32h(fp
, fd
);
7493 tcg_temp_free_i32(fp
);
7494 tcg_temp_free_i32(fph
);
7501 TCGv_i32 fp0
= tcg_temp_new_i32();
7502 TCGv_i32 fp1
= tcg_temp_new_i32();
7503 TCGv_i32 fp2
= tcg_temp_new_i32();
7505 gen_load_fpr32(fp0
, fs
);
7506 gen_load_fpr32(fp1
, ft
);
7507 gen_load_fpr32(fp2
, fr
);
7508 gen_helper_float_muladd_s(fp2
, fp0
, fp1
, fp2
);
7509 tcg_temp_free_i32(fp0
);
7510 tcg_temp_free_i32(fp1
);
7511 gen_store_fpr32(fp2
, fd
);
7512 tcg_temp_free_i32(fp2
);
7518 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
7520 TCGv_i64 fp0
= tcg_temp_new_i64();
7521 TCGv_i64 fp1
= tcg_temp_new_i64();
7522 TCGv_i64 fp2
= tcg_temp_new_i64();
7524 gen_load_fpr64(ctx
, fp0
, fs
);
7525 gen_load_fpr64(ctx
, fp1
, ft
);
7526 gen_load_fpr64(ctx
, fp2
, fr
);
7527 gen_helper_float_muladd_d(fp2
, fp0
, fp1
, fp2
);
7528 tcg_temp_free_i64(fp0
);
7529 tcg_temp_free_i64(fp1
);
7530 gen_store_fpr64(ctx
, fp2
, fd
);
7531 tcg_temp_free_i64(fp2
);
7536 check_cp1_64bitmode(ctx
);
7538 TCGv_i64 fp0
= tcg_temp_new_i64();
7539 TCGv_i64 fp1
= tcg_temp_new_i64();
7540 TCGv_i64 fp2
= tcg_temp_new_i64();
7542 gen_load_fpr64(ctx
, fp0
, fs
);
7543 gen_load_fpr64(ctx
, fp1
, ft
);
7544 gen_load_fpr64(ctx
, fp2
, fr
);
7545 gen_helper_float_muladd_ps(fp2
, fp0
, fp1
, fp2
);
7546 tcg_temp_free_i64(fp0
);
7547 tcg_temp_free_i64(fp1
);
7548 gen_store_fpr64(ctx
, fp2
, fd
);
7549 tcg_temp_free_i64(fp2
);
7556 TCGv_i32 fp0
= tcg_temp_new_i32();
7557 TCGv_i32 fp1
= tcg_temp_new_i32();
7558 TCGv_i32 fp2
= tcg_temp_new_i32();
7560 gen_load_fpr32(fp0
, fs
);
7561 gen_load_fpr32(fp1
, ft
);
7562 gen_load_fpr32(fp2
, fr
);
7563 gen_helper_float_mulsub_s(fp2
, fp0
, fp1
, fp2
);
7564 tcg_temp_free_i32(fp0
);
7565 tcg_temp_free_i32(fp1
);
7566 gen_store_fpr32(fp2
, fd
);
7567 tcg_temp_free_i32(fp2
);
7573 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
7575 TCGv_i64 fp0
= tcg_temp_new_i64();
7576 TCGv_i64 fp1
= tcg_temp_new_i64();
7577 TCGv_i64 fp2
= tcg_temp_new_i64();
7579 gen_load_fpr64(ctx
, fp0
, fs
);
7580 gen_load_fpr64(ctx
, fp1
, ft
);
7581 gen_load_fpr64(ctx
, fp2
, fr
);
7582 gen_helper_float_mulsub_d(fp2
, fp0
, fp1
, fp2
);
7583 tcg_temp_free_i64(fp0
);
7584 tcg_temp_free_i64(fp1
);
7585 gen_store_fpr64(ctx
, fp2
, fd
);
7586 tcg_temp_free_i64(fp2
);
7591 check_cp1_64bitmode(ctx
);
7593 TCGv_i64 fp0
= tcg_temp_new_i64();
7594 TCGv_i64 fp1
= tcg_temp_new_i64();
7595 TCGv_i64 fp2
= tcg_temp_new_i64();
7597 gen_load_fpr64(ctx
, fp0
, fs
);
7598 gen_load_fpr64(ctx
, fp1
, ft
);
7599 gen_load_fpr64(ctx
, fp2
, fr
);
7600 gen_helper_float_mulsub_ps(fp2
, fp0
, fp1
, fp2
);
7601 tcg_temp_free_i64(fp0
);
7602 tcg_temp_free_i64(fp1
);
7603 gen_store_fpr64(ctx
, fp2
, fd
);
7604 tcg_temp_free_i64(fp2
);
7611 TCGv_i32 fp0
= tcg_temp_new_i32();
7612 TCGv_i32 fp1
= tcg_temp_new_i32();
7613 TCGv_i32 fp2
= tcg_temp_new_i32();
7615 gen_load_fpr32(fp0
, fs
);
7616 gen_load_fpr32(fp1
, ft
);
7617 gen_load_fpr32(fp2
, fr
);
7618 gen_helper_float_nmuladd_s(fp2
, fp0
, fp1
, fp2
);
7619 tcg_temp_free_i32(fp0
);
7620 tcg_temp_free_i32(fp1
);
7621 gen_store_fpr32(fp2
, fd
);
7622 tcg_temp_free_i32(fp2
);
7628 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
7630 TCGv_i64 fp0
= tcg_temp_new_i64();
7631 TCGv_i64 fp1
= tcg_temp_new_i64();
7632 TCGv_i64 fp2
= tcg_temp_new_i64();
7634 gen_load_fpr64(ctx
, fp0
, fs
);
7635 gen_load_fpr64(ctx
, fp1
, ft
);
7636 gen_load_fpr64(ctx
, fp2
, fr
);
7637 gen_helper_float_nmuladd_d(fp2
, fp0
, fp1
, fp2
);
7638 tcg_temp_free_i64(fp0
);
7639 tcg_temp_free_i64(fp1
);
7640 gen_store_fpr64(ctx
, fp2
, fd
);
7641 tcg_temp_free_i64(fp2
);
7646 check_cp1_64bitmode(ctx
);
7648 TCGv_i64 fp0
= tcg_temp_new_i64();
7649 TCGv_i64 fp1
= tcg_temp_new_i64();
7650 TCGv_i64 fp2
= tcg_temp_new_i64();
7652 gen_load_fpr64(ctx
, fp0
, fs
);
7653 gen_load_fpr64(ctx
, fp1
, ft
);
7654 gen_load_fpr64(ctx
, fp2
, fr
);
7655 gen_helper_float_nmuladd_ps(fp2
, fp0
, fp1
, fp2
);
7656 tcg_temp_free_i64(fp0
);
7657 tcg_temp_free_i64(fp1
);
7658 gen_store_fpr64(ctx
, fp2
, fd
);
7659 tcg_temp_free_i64(fp2
);
7666 TCGv_i32 fp0
= tcg_temp_new_i32();
7667 TCGv_i32 fp1
= tcg_temp_new_i32();
7668 TCGv_i32 fp2
= tcg_temp_new_i32();
7670 gen_load_fpr32(fp0
, fs
);
7671 gen_load_fpr32(fp1
, ft
);
7672 gen_load_fpr32(fp2
, fr
);
7673 gen_helper_float_nmulsub_s(fp2
, fp0
, fp1
, fp2
);
7674 tcg_temp_free_i32(fp0
);
7675 tcg_temp_free_i32(fp1
);
7676 gen_store_fpr32(fp2
, fd
);
7677 tcg_temp_free_i32(fp2
);
7683 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
7685 TCGv_i64 fp0
= tcg_temp_new_i64();
7686 TCGv_i64 fp1
= tcg_temp_new_i64();
7687 TCGv_i64 fp2
= tcg_temp_new_i64();
7689 gen_load_fpr64(ctx
, fp0
, fs
);
7690 gen_load_fpr64(ctx
, fp1
, ft
);
7691 gen_load_fpr64(ctx
, fp2
, fr
);
7692 gen_helper_float_nmulsub_d(fp2
, fp0
, fp1
, fp2
);
7693 tcg_temp_free_i64(fp0
);
7694 tcg_temp_free_i64(fp1
);
7695 gen_store_fpr64(ctx
, fp2
, fd
);
7696 tcg_temp_free_i64(fp2
);
7701 check_cp1_64bitmode(ctx
);
7703 TCGv_i64 fp0
= tcg_temp_new_i64();
7704 TCGv_i64 fp1
= tcg_temp_new_i64();
7705 TCGv_i64 fp2
= tcg_temp_new_i64();
7707 gen_load_fpr64(ctx
, fp0
, fs
);
7708 gen_load_fpr64(ctx
, fp1
, ft
);
7709 gen_load_fpr64(ctx
, fp2
, fr
);
7710 gen_helper_float_nmulsub_ps(fp2
, fp0
, fp1
, fp2
);
7711 tcg_temp_free_i64(fp0
);
7712 tcg_temp_free_i64(fp1
);
7713 gen_store_fpr64(ctx
, fp2
, fd
);
7714 tcg_temp_free_i64(fp2
);
7720 generate_exception (ctx
, EXCP_RI
);
    MIPS_DEBUG("%s %s, %s, %s, %s", opn, fregnames[fd], fregnames[fr],
               fregnames[fs], fregnames[ft]);
}
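/* RDHWR: expose a small set of hardware registers (CPU number, SYNCI step,
   cycle counter, counter resolution, user TLS pointer) to unprivileged code. */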
static void gen_rdhwr (CPUState *env, DisasContext *ctx, int rt, int rd)
{
    TCGv t0;

    check_insn(env, ctx, ISA_MIPS32R2);
    t0 = tcg_temp_new();

    switch (rd) {
    case 0:
        save_cpu_state(ctx, 1);
        gen_helper_rdhwr_cpunum(t0);
        gen_store_gpr(t0, rt);
        break;
    case 1:
        save_cpu_state(ctx, 1);
        gen_helper_rdhwr_synci_step(t0);
        gen_store_gpr(t0, rt);
        break;
    case 2:
        save_cpu_state(ctx, 1);
        gen_helper_rdhwr_cc(t0);
        gen_store_gpr(t0, rt);
        break;
    case 3:
        save_cpu_state(ctx, 1);
        gen_helper_rdhwr_ccres(t0);
        gen_store_gpr(t0, rt);
        break;
    case 29:
#if defined(CONFIG_USER_ONLY)
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUState, tls_value));
        gen_store_gpr(t0, rt);
        break;
#else
        /* XXX: Some CPUs implement this in hardware.
           Not supported yet. */
#endif
    default:            /* Invalid */
        MIPS_INVAL("rdhwr");
        generate_exception(ctx, EXCP_RI);
        break;
    }
    tcg_temp_free(t0);
}
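/* Finish off a branch whose delay slot has just been translated: clear the
   branch hflags and emit the actual control transfer. */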
static void handle_delay_slot (CPUState *env, DisasContext *ctx,
                               int insn_bytes)
{
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branches completion */
        ctx->hflags &= ~MIPS_HFLAG_BMASK;
        ctx->bstate = BS_BRANCH;
        save_cpu_state(ctx, 0);
        /* FIXME: Need to clear can_do_io.  */
        switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_B:
            /* unconditional branch */
            MIPS_DEBUG("unconditional branch");
            if (proc_hflags & MIPS_HFLAG_BX) {
                tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
            }
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BL:
            /* blikely taken case */
            MIPS_DEBUG("blikely branch taken");
            gen_goto_tb(ctx, 0, ctx->btarget);
            break;
        case MIPS_HFLAG_BC:
            /* Conditional branch */
            MIPS_DEBUG("conditional branch");
            {
                int l1 = gen_new_label();

                tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
                gen_goto_tb(ctx, 1, ctx->pc + insn_bytes);
                gen_set_label(l1);
                gen_goto_tb(ctx, 0, ctx->btarget);
            }
            break;
        case MIPS_HFLAG_BR:
            /* unconditional branch to register */
            MIPS_DEBUG("branch to register");
            if (env->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
                TCGv t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new_i32();

                tcg_gen_andi_tl(t0, btarget, 0x1);
                tcg_gen_trunc_tl_i32(t1, t0);
                tcg_temp_free(t0);
                tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
                tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
                tcg_gen_or_i32(hflags, hflags, t1);
                tcg_temp_free_i32(t1);

                tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);
            } else {
                tcg_gen_mov_tl(cpu_PC, btarget);
            }
            if (ctx->singlestep_enabled) {
                save_cpu_state(ctx, 0);
                gen_helper_0i(raise_exception, EXCP_DEBUG);
            }
            tcg_gen_exit_tb(0);
            break;
        default:
            MIPS_DEBUG("unknown branch");
            break;
        }
    }
}
7841 /* ISA extensions (ASEs) */
7842 /* MIPS16 extension to MIPS32 */
7844 /* MIPS16 major opcodes */
7846 M16_OPC_ADDIUSP
= 0x00,
7847 M16_OPC_ADDIUPC
= 0x01,
7850 M16_OPC_BEQZ
= 0x04,
7851 M16_OPC_BNEQZ
= 0x05,
7852 M16_OPC_SHIFT
= 0x06,
7854 M16_OPC_RRIA
= 0x08,
7855 M16_OPC_ADDIU8
= 0x09,
7856 M16_OPC_SLTI
= 0x0a,
7857 M16_OPC_SLTIU
= 0x0b,
7860 M16_OPC_CMPI
= 0x0e,
7864 M16_OPC_LWSP
= 0x12,
7868 M16_OPC_LWPC
= 0x16,
7872 M16_OPC_SWSP
= 0x1a,
7876 M16_OPC_EXTEND
= 0x1e,
7880 /* I8 funct field */
7899 /* RR funct field */
7933 /* I64 funct field */
7945 /* RR ry field for CNVT */
7947 RR_RY_CNVT_ZEB
= 0x0,
7948 RR_RY_CNVT_ZEH
= 0x1,
7949 RR_RY_CNVT_ZEW
= 0x2,
7950 RR_RY_CNVT_SEB
= 0x4,
7951 RR_RY_CNVT_SEH
= 0x5,
7952 RR_RY_CNVT_SEW
= 0x6,
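/* Map the 3-bit MIPS16 register field onto the full 32-entry register file. */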
static int xlat (int r)
{
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
7962 static void gen_mips16_save (DisasContext
*ctx
,
7963 int xsregs
, int aregs
,
7964 int do_ra
, int do_s0
, int do_s1
,
7967 TCGv t0
= tcg_temp_new();
7968 TCGv t1
= tcg_temp_new();
7998 generate_exception(ctx
, EXCP_RI
);
8004 gen_base_offset_addr(ctx
, t0
, 29, 12);
8005 gen_load_gpr(t1
, 7);
8006 op_ldst_sw(t1
, t0
, ctx
);
8009 gen_base_offset_addr(ctx
, t0
, 29, 8);
8010 gen_load_gpr(t1
, 6);
8011 op_ldst_sw(t1
, t0
, ctx
);
8014 gen_base_offset_addr(ctx
, t0
, 29, 4);
8015 gen_load_gpr(t1
, 5);
8016 op_ldst_sw(t1
, t0
, ctx
);
8019 gen_base_offset_addr(ctx
, t0
, 29, 0);
8020 gen_load_gpr(t1
, 4);
8021 op_ldst_sw(t1
, t0
, ctx
);
8024 gen_load_gpr(t0
, 29);
#define DECR_AND_STORE(reg) do {            \
        tcg_gen_subi_tl(t0, t0, 4);         \
        gen_load_gpr(t1, reg);              \
        op_ldst_sw(t1, t0, ctx);            \
    } while (0)
8093 generate_exception(ctx
, EXCP_RI
);
8109 #undef DECR_AND_STORE
8111 tcg_gen_subi_tl(cpu_gpr
[29], cpu_gpr
[29], framesize
);
8116 static void gen_mips16_restore (DisasContext
*ctx
,
8117 int xsregs
, int aregs
,
8118 int do_ra
, int do_s0
, int do_s1
,
8122 TCGv t0
= tcg_temp_new();
8123 TCGv t1
= tcg_temp_new();
8125 tcg_gen_addi_tl(t0
, cpu_gpr
[29], framesize
);
#define DECR_AND_LOAD(reg) do {             \
        tcg_gen_subi_tl(t0, t0, 4);         \
        op_ldst_lw(t1, t0, ctx);            \
        gen_store_gpr(t1, reg);             \
    } while (0)
8194 generate_exception(ctx
, EXCP_RI
);
8210 #undef DECR_AND_LOAD
8212 tcg_gen_addi_tl(cpu_gpr
[29], cpu_gpr
[29], framesize
);
static void gen_addiupc (DisasContext *ctx, int rx, int imm,
                         int is_64_bit, int extended)
{
    TCGv t0;

    if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();
    tcg_gen_movi_tl(t0, pc_relative_pc(ctx));
    tcg_gen_addi_tl(cpu_gpr[rx], t0, imm);
    if (!is_64_bit) {
        tcg_gen_ext32s_tl(cpu_gpr[rx], cpu_gpr[rx]);
    }
    tcg_temp_free(t0);
}
8238 #if defined(TARGET_MIPS64)
8239 static void decode_i64_mips16 (CPUState
*env
, DisasContext
*ctx
,
8240 int ry
, int funct
, int16_t offset
,
8246 offset
= extended
? offset
: offset
<< 3;
8247 gen_ldst(ctx
, OPC_LD
, ry
, 29, offset
);
8251 offset
= extended
? offset
: offset
<< 3;
8252 gen_ldst(ctx
, OPC_SD
, ry
, 29, offset
);
8256 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
8257 gen_ldst(ctx
, OPC_SD
, 31, 29, offset
);
8261 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
8262 gen_arith_imm(env
, ctx
, OPC_DADDIU
, 29, 29, offset
);
8265 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8266 generate_exception(ctx
, EXCP_RI
);
8268 offset
= extended
? offset
: offset
<< 3;
8269 gen_ldst(ctx
, OPC_LDPC
, ry
, 0, offset
);
8274 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
8275 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, ry
, offset
);
8279 offset
= extended
? offset
: offset
<< 2;
8280 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
8284 offset
= extended
? offset
: offset
<< 2;
8285 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, 29, offset
);
8291 static int decode_extended_mips16_opc (CPUState
*env
, DisasContext
*ctx
,
8294 int extend
= lduw_code(ctx
->pc
+ 2);
8295 int op
, rx
, ry
, funct
, sa
;
8296 int16_t imm
, offset
;
8298 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
8299 op
= (ctx
->opcode
>> 11) & 0x1f;
8300 sa
= (ctx
->opcode
>> 22) & 0x1f;
8301 funct
= (ctx
->opcode
>> 8) & 0x7;
8302 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
8303 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
8304 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
8305 | ((ctx
->opcode
>> 21) & 0x3f) << 5
8306 | (ctx
->opcode
& 0x1f));
8308 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
8311 case M16_OPC_ADDIUSP
:
8312 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, 29, imm
);
8314 case M16_OPC_ADDIUPC
:
8315 gen_addiupc(ctx
, rx
, imm
, 0, 1);
8318 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1);
8319 /* No delay slot, so just process as a normal instruction */
8322 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1);
8323 /* No delay slot, so just process as a normal instruction */
8326 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1);
8327 /* No delay slot, so just process as a normal instruction */
8330 switch (ctx
->opcode
& 0x3) {
8332 gen_shift_imm(env
, ctx
, OPC_SLL
, rx
, ry
, sa
);
8335 #if defined(TARGET_MIPS64)
8337 gen_shift_imm(env
, ctx
, OPC_DSLL
, rx
, ry
, sa
);
8339 generate_exception(ctx
, EXCP_RI
);
8343 gen_shift_imm(env
, ctx
, OPC_SRL
, rx
, ry
, sa
);
8346 gen_shift_imm(env
, ctx
, OPC_SRA
, rx
, ry
, sa
);
8350 #if defined(TARGET_MIPS64)
8353 gen_ldst(ctx
, OPC_LD
, ry
, rx
, offset
);
8357 imm
= ctx
->opcode
& 0xf;
8358 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
8359 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
8360 imm
= (int16_t) (imm
<< 1) >> 1;
8361 if ((ctx
->opcode
>> 4) & 0x1) {
8362 #if defined(TARGET_MIPS64)
8364 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, rx
, imm
);
8366 generate_exception(ctx
, EXCP_RI
);
8369 gen_arith_imm(env
, ctx
, OPC_ADDIU
, ry
, rx
, imm
);
8372 case M16_OPC_ADDIU8
:
8373 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, rx
, imm
);
8376 gen_slt_imm(env
, OPC_SLTI
, 24, rx
, imm
);
8379 gen_slt_imm(env
, OPC_SLTIU
, 24, rx
, imm
);
8384 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1);
8387 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1);
8390 gen_ldst(ctx
, OPC_SW
, 31, 29, imm
);
8393 gen_arith_imm(env
, ctx
, OPC_ADDIU
, 29, 29, imm
);
8397 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
8398 int aregs
= (ctx
->opcode
>> 16) & 0xf;
8399 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
8400 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
8401 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
8402 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
8403 | (ctx
->opcode
& 0xf)) << 3;
8405 if (ctx
->opcode
& (1 << 7)) {
8406 gen_mips16_save(ctx
, xsregs
, aregs
,
8407 do_ra
, do_s0
, do_s1
,
8410 gen_mips16_restore(ctx
, xsregs
, aregs
,
8411 do_ra
, do_s0
, do_s1
,
8417 generate_exception(ctx
, EXCP_RI
);
8422 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
8425 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
8427 #if defined(TARGET_MIPS64)
8429 gen_ldst(ctx
, OPC_SD
, ry
, rx
, offset
);
8433 gen_ldst(ctx
, OPC_LB
, ry
, rx
, offset
);
8436 gen_ldst(ctx
, OPC_LH
, ry
, rx
, offset
);
8439 gen_ldst(ctx
, OPC_LW
, rx
, 29, offset
);
8442 gen_ldst(ctx
, OPC_LW
, ry
, rx
, offset
);
8445 gen_ldst(ctx
, OPC_LBU
, ry
, rx
, offset
);
8448 gen_ldst(ctx
, OPC_LHU
, ry
, rx
, offset
);
8451 gen_ldst(ctx
, OPC_LWPC
, rx
, 0, offset
);
8453 #if defined(TARGET_MIPS64)
8455 gen_ldst(ctx
, OPC_LWU
, ry
, rx
, offset
);
8459 gen_ldst(ctx
, OPC_SB
, ry
, rx
, offset
);
8462 gen_ldst(ctx
, OPC_SH
, ry
, rx
, offset
);
8465 gen_ldst(ctx
, OPC_SW
, rx
, 29, offset
);
8468 gen_ldst(ctx
, OPC_SW
, ry
, rx
, offset
);
8470 #if defined(TARGET_MIPS64)
8472 decode_i64_mips16(env
, ctx
, ry
, funct
, offset
, 1);
8476 generate_exception(ctx
, EXCP_RI
);
8483 static int decode_mips16_opc (CPUState
*env
, DisasContext
*ctx
,
8488 int op
, cnvt_op
, op1
, offset
;
8492 op
= (ctx
->opcode
>> 11) & 0x1f;
8493 sa
= (ctx
->opcode
>> 2) & 0x7;
8494 sa
= sa
== 0 ? 8 : sa
;
8495 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
8496 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
8497 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
8498 op1
= offset
= ctx
->opcode
& 0x1f;
8503 case M16_OPC_ADDIUSP
:
8505 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
8507 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, 29, imm
);
8510 case M16_OPC_ADDIUPC
:
8511 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
8514 offset
= (ctx
->opcode
& 0x7ff) << 1;
8515 offset
= (int16_t)(offset
<< 4) >> 4;
8516 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
);
8517 /* No delay slot, so just process as a normal instruction */
8520 offset
= lduw_code(ctx
->pc
+ 2);
8521 offset
= (((ctx
->opcode
& 0x1f) << 21)
8522 | ((ctx
->opcode
>> 5) & 0x1f) << 16
8524 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALXS
: OPC_JALS
;
8525 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
);
8530 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0, ((int8_t)ctx
->opcode
) << 1);
8531 /* No delay slot, so just process as a normal instruction */
8534 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0, ((int8_t)ctx
->opcode
) << 1);
8535 /* No delay slot, so just process as a normal instruction */
8538 switch (ctx
->opcode
& 0x3) {
8540 gen_shift_imm(env
, ctx
, OPC_SLL
, rx
, ry
, sa
);
8543 #if defined(TARGET_MIPS64)
8545 gen_shift_imm(env
, ctx
, OPC_DSLL
, rx
, ry
, sa
);
8547 generate_exception(ctx
, EXCP_RI
);
8551 gen_shift_imm(env
, ctx
, OPC_SRL
, rx
, ry
, sa
);
8554 gen_shift_imm(env
, ctx
, OPC_SRA
, rx
, ry
, sa
);
8558 #if defined(TARGET_MIPS64)
8561 gen_ldst(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
8566 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
8568 if ((ctx
->opcode
>> 4) & 1) {
8569 #if defined(TARGET_MIPS64)
8571 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, rx
, imm
);
8573 generate_exception(ctx
, EXCP_RI
);
8576 gen_arith_imm(env
, ctx
, OPC_ADDIU
, ry
, rx
, imm
);
8580 case M16_OPC_ADDIU8
:
8582 int16_t imm
= (int8_t) ctx
->opcode
;
8584 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, rx
, imm
);
8589 int16_t imm
= (uint8_t) ctx
->opcode
;
8591 gen_slt_imm(env
, OPC_SLTI
, 24, rx
, imm
);
8596 int16_t imm
= (uint8_t) ctx
->opcode
;
8598 gen_slt_imm(env
, OPC_SLTIU
, 24, rx
, imm
);
8605 funct
= (ctx
->opcode
>> 8) & 0x7;
8608 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
8609 ((int8_t)ctx
->opcode
) << 1);
8612 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
8613 ((int8_t)ctx
->opcode
) << 1);
8616 gen_ldst(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
8619 gen_arith_imm(env
, ctx
, OPC_ADDIU
, 29, 29,
8620 ((int8_t)ctx
->opcode
) << 3);
8624 int do_ra
= ctx
->opcode
& (1 << 6);
8625 int do_s0
= ctx
->opcode
& (1 << 5);
8626 int do_s1
= ctx
->opcode
& (1 << 4);
8627 int framesize
= ctx
->opcode
& 0xf;
8629 if (framesize
== 0) {
8632 framesize
= framesize
<< 3;
8635 if (ctx
->opcode
& (1 << 7)) {
8636 gen_mips16_save(ctx
, 0, 0,
8637 do_ra
, do_s0
, do_s1
, framesize
);
8639 gen_mips16_restore(ctx
, 0, 0,
8640 do_ra
, do_s0
, do_s1
, framesize
);
8646 int rz
= xlat(ctx
->opcode
& 0x7);
8648 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
8649 ((ctx
->opcode
>> 5) & 0x7);
8650 gen_arith(env
, ctx
, OPC_ADDU
, reg32
, rz
, 0);
8654 reg32
= ctx
->opcode
& 0x1f;
8655 gen_arith(env
, ctx
, OPC_ADDU
, ry
, reg32
, 0);
8658 generate_exception(ctx
, EXCP_RI
);
8665 int16_t imm
= (uint8_t) ctx
->opcode
;
8667 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, 0, imm
);
8672 int16_t imm
= (uint8_t) ctx
->opcode
;
8674 gen_logic_imm(env
, OPC_XORI
, 24, rx
, imm
);
8677 #if defined(TARGET_MIPS64)
8680 gen_ldst(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
8684 gen_ldst(ctx
, OPC_LB
, ry
, rx
, offset
);
8687 gen_ldst(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
8690 gen_ldst(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
8693 gen_ldst(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
8696 gen_ldst(ctx
, OPC_LBU
, ry
, rx
, offset
);
8699 gen_ldst(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
8702 gen_ldst(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
8704 #if defined (TARGET_MIPS64)
8707 gen_ldst(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
8711 gen_ldst(ctx
, OPC_SB
, ry
, rx
, offset
);
8714 gen_ldst(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
8717 gen_ldst(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
8720 gen_ldst(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
8724 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
8727 switch (ctx
->opcode
& 0x3) {
8729 mips32_op
= OPC_ADDU
;
8732 mips32_op
= OPC_SUBU
;
8734 #if defined(TARGET_MIPS64)
8736 mips32_op
= OPC_DADDU
;
8740 mips32_op
= OPC_DSUBU
;
8745 generate_exception(ctx
, EXCP_RI
);
8749 gen_arith(env
, ctx
, mips32_op
, rz
, rx
, ry
);
8758 int nd
= (ctx
->opcode
>> 7) & 0x1;
8759 int link
= (ctx
->opcode
>> 6) & 0x1;
8760 int ra
= (ctx
->opcode
>> 5) & 0x1;
8763 op
= nd
? OPC_JALRC
: OPC_JALRS
;
8768 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0);
8775 /* XXX: not clear which exception should be raised
8776 * when in debug mode...
8778 check_insn(env
, ctx
, ISA_MIPS32
);
8779 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
8780 generate_exception(ctx
, EXCP_DBp
);
8782 generate_exception(ctx
, EXCP_DBp
);
8786 gen_slt(env
, OPC_SLT
, 24, rx
, ry
);
8789 gen_slt(env
, OPC_SLTU
, 24, rx
, ry
);
8792 generate_exception(ctx
, EXCP_BREAK
);
8795 gen_shift(env
, ctx
, OPC_SLLV
, ry
, rx
, ry
);
8798 gen_shift(env
, ctx
, OPC_SRLV
, ry
, rx
, ry
);
8801 gen_shift(env
, ctx
, OPC_SRAV
, ry
, rx
, ry
);
8803 #if defined (TARGET_MIPS64)
8806 gen_shift_imm(env
, ctx
, OPC_DSRL
, ry
, ry
, sa
);
8810 gen_logic(env
, OPC_XOR
, 24, rx
, ry
);
8813 gen_arith(env
, ctx
, OPC_SUBU
, rx
, 0, ry
);
8816 gen_logic(env
, OPC_AND
, rx
, rx
, ry
);
8819 gen_logic(env
, OPC_OR
, rx
, rx
, ry
);
8822 gen_logic(env
, OPC_XOR
, rx
, rx
, ry
);
8825 gen_logic(env
, OPC_NOR
, rx
, ry
, 0);
8828 gen_HILO(ctx
, OPC_MFHI
, rx
);
8832 case RR_RY_CNVT_ZEB
:
8833 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
8835 case RR_RY_CNVT_ZEH
:
8836 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
8838 case RR_RY_CNVT_SEB
:
8839 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
8841 case RR_RY_CNVT_SEH
:
8842 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
8844 #if defined (TARGET_MIPS64)
8845 case RR_RY_CNVT_ZEW
:
8847 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
8849 case RR_RY_CNVT_SEW
:
8851 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
8855 generate_exception(ctx
, EXCP_RI
);
8860 gen_HILO(ctx
, OPC_MFLO
, rx
);
8862 #if defined (TARGET_MIPS64)
8865 gen_shift_imm(env
, ctx
, OPC_DSRA
, ry
, ry
, sa
);
8869 gen_shift(env
, ctx
, OPC_DSLLV
, ry
, rx
, ry
);
8873 gen_shift(env
, ctx
, OPC_DSRLV
, ry
, rx
, ry
);
8877 gen_shift(env
, ctx
, OPC_DSRAV
, ry
, rx
, ry
);
8881 gen_muldiv(ctx
, OPC_MULT
, rx
, ry
);
8884 gen_muldiv(ctx
, OPC_MULTU
, rx
, ry
);
8887 gen_muldiv(ctx
, OPC_DIV
, rx
, ry
);
8890 gen_muldiv(ctx
, OPC_DIVU
, rx
, ry
);
8892 #if defined (TARGET_MIPS64)
8895 gen_muldiv(ctx
, OPC_DMULT
, rx
, ry
);
8899 gen_muldiv(ctx
, OPC_DMULTU
, rx
, ry
);
8903 gen_muldiv(ctx
, OPC_DDIV
, rx
, ry
);
8907 gen_muldiv(ctx
, OPC_DDIVU
, rx
, ry
);
8911 generate_exception(ctx
, EXCP_RI
);
8915 case M16_OPC_EXTEND
:
8916 decode_extended_mips16_opc(env
, ctx
, is_branch
);
8919 #if defined(TARGET_MIPS64)
8921 funct
= (ctx
->opcode
>> 8) & 0x7;
8922 decode_i64_mips16(env
, ctx
, ry
, funct
, offset
, 0);
8926 generate_exception(ctx
, EXCP_RI
);
8933 /* microMIPS extension to MIPS32 */
8935 /* microMIPS32 major opcodes */
8974 /* 0x20 is reserved */
8984 /* 0x28 and 0x29 are reserved */
8994 /* 0x30 and 0x31 are reserved */
9004 /* 0x38 and 0x39 are reserved */
9015 /* POOL32A encoding of minor opcode field */
9018 /* These opcodes are distinguished only by bits 9..6; those bits are
9019 * what are recorded below. */
9045 /* The following can be distinguished by their lower 6 bits. */
9051 /* POOL32AXF encoding of minor opcode field extension */
9065 /* bits 13..12 for 0x01 */
9071 /* bits 13..12 for 0x2a */
9077 /* bits 13..12 for 0x32 */
9081 /* bits 15..12 for 0x2c */
9097 /* bits 15..12 for 0x34 */
9105 /* bits 15..12 for 0x3c */
9107 JR
= 0x0, /* alias */
9112 /* bits 15..12 for 0x05 */
9116 /* bits 15..12 for 0x0d */
9126 /* bits 15..12 for 0x15 */
9132 /* bits 15..12 for 0x1d */
9136 /* bits 15..12 for 0x2d */
9141 /* bits 15..12 for 0x35 */
9148 /* POOL32B encoding of minor opcode field (bits 15..12) */
9164 /* POOL32C encoding of minor opcode field (bits 15..12) */
9172 /* 0xa is reserved */
9179 /* 0x6 is reserved */
9185 /* POOL32F encoding of minor opcode field (bits 5..0) */
9188 /* These are the bit 7..6 values */
9199 /* These are the bit 8..6 values */
9243 CABS_COND_FMT
= 0x1c, /* MIPS3D */
9247 /* POOL32Fxf encoding of minor opcode extension field */
9285 /* POOL32I encoding of minor opcode field (bits 25..21) */
9310 /* These overlap and are distinguished by bit16 of the instruction */
9319 /* POOL16A encoding of minor opcode field */
9326 /* POOL16B encoding of minor opcode field */
9333 /* POOL16C encoding of minor opcode field */
9353 /* POOL16D encoding of minor opcode field */
9360 /* POOL16E encoding of minor opcode field */
static int mmreg (int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

/* Used for 16-bit store instructions.  */
static int mmreg2 (int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

#define uMIPS_RD(op) ((op >> 7) & 0x7)
#define uMIPS_RS(op) ((op >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) ((op >> 1) & 0x7)
#define uMIPS_RD5(op) ((op >> 5) & 0x1f)
#define uMIPS_RS5(op) (op & 0x1f)

/* Signed immediate */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32-width)))                  \
               << (32-width))                                           \
     >> (32-width))
/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
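/* microMIPS 16-bit arithmetic helpers: each expands a compressed encoding
   into the equivalent 32-bit ADDIU/ANDI operation. */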
static void gen_addiur1sp (CPUState *env, DisasContext *ctx)
{
    int rd = mmreg(uMIPS_RD(ctx->opcode));

    gen_arith_imm(env, ctx, OPC_ADDIU, rd, 29, ((ctx->opcode >> 1) & 0x3f) << 2);
}
static void gen_addiur2 (CPUState *env, DisasContext *ctx)
{
    static const int decoded_imm[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));

    gen_arith_imm(env, ctx, OPC_ADDIU, rd, rs, decoded_imm[ZIMM(ctx->opcode, 1, 3)]);
}
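/* ADDIUSP uses a non-linear 9-bit immediate encoding; decode it before
   scaling by 4. */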
static void gen_addiusp (CPUState *env, DisasContext *ctx)
{
    int encoded = ZIMM(ctx->opcode, 1, 9);
    int decoded;

    if (encoded <= 1) {
        decoded = 256 + encoded;
    } else if (encoded <= 255) {
        decoded = encoded;
    } else if (encoded <= 509) {
        decoded = encoded - 512;
    } else {
        decoded = encoded - 768;
    }

    gen_arith_imm(env, ctx, OPC_ADDIU, 29, 29, decoded << 2);
}
static void gen_addius5 (CPUState *env, DisasContext *ctx)
{
    int imm = SIMM(ctx->opcode, 1, 4);
    int rd = (ctx->opcode >> 5) & 0x1f;

    gen_arith_imm(env, ctx, OPC_ADDIU, rd, rd, imm);
}
static void gen_andi16 (CPUState *env, DisasContext *ctx)
{
    static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
                                       31, 32, 63, 64, 255, 32768, 65535 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    int encoded = ZIMM(ctx->opcode, 0, 4);

    gen_logic_imm(env, OPC_ANDI, rd, rs, decoded_imm[encoded]);
}
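/* LWM/SWM (and LDM/SDM on MIPS64): load or store the register list encoded
   in the instruction, starting at base+offset, via helpers. */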
static void gen_ldst_multiple (DisasContext *ctx, uint32_t opc, int reglist,
                               int base, int16_t offset)
{
    const char *opn = "ldst_multiple";
    TCGv t0, t1;
    TCGv_i32 t2;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception(ctx, EXCP_RI);
        return;
    }

    t0 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

    t1 = tcg_const_tl(reglist);
    t2 = tcg_const_i32(ctx->mem_idx);

    save_cpu_state(ctx, 1);
    switch (opc) {
    case LWM32:
        gen_helper_lwm(t0, t1, t2);
        opn = "lwm";
        break;
    case SWM32:
        gen_helper_swm(t0, t1, t2);
        opn = "swm";
        break;
#ifdef TARGET_MIPS64
    case LDM:
        gen_helper_ldm(t0, t1, t2);
        opn = "ldm";
        break;
    case SDM:
        gen_helper_sdm(t0, t1, t2);
        opn = "sdm";
        break;
#endif
    }
    MIPS_DEBUG("%s, %x, %d(%s)", opn, reglist, offset, regnames[base]);
    tcg_temp_free(t0);
    tcg_temp_free(t1);
    tcg_temp_free_i32(t2);
}
9492 static void gen_pool16c_insn (CPUState
*env
, DisasContext
*ctx
, int *is_branch
)
9494 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
9495 int rs
= mmreg(ctx
->opcode
& 0x7);
9498 switch (((ctx
->opcode
) >> 4) & 0x3f) {
9503 gen_logic(env
, OPC_NOR
, rd
, rs
, 0);
9509 gen_logic(env
, OPC_XOR
, rd
, rd
, rs
);
9515 gen_logic(env
, OPC_AND
, rd
, rd
, rs
);
9521 gen_logic(env
, OPC_OR
, rd
, rd
, rs
);
9528 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
9529 int offset
= ZIMM(ctx
->opcode
, 0, 4);
9531 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
9540 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
9541 int offset
= ZIMM(ctx
->opcode
, 0, 4);
9543 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
9550 int reg
= ctx
->opcode
& 0x1f;
9552 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0);
9559 int reg
= ctx
->opcode
& 0x1f;
9561 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0);
9562 /* Let normal delay slot handling in our caller take us
9563 to the branch target. */
9575 int reg
= ctx
->opcode
& 0x1f;
9577 gen_compute_branch(ctx
, opc
, 2, reg
, 31, 0);
9583 gen_HILO(ctx
, OPC_MFHI
, uMIPS_RS5(ctx
->opcode
));
9587 gen_HILO(ctx
, OPC_MFLO
, uMIPS_RS5(ctx
->opcode
));
9590 generate_exception(ctx
, EXCP_BREAK
);
9593 /* XXX: not clear which exception should be raised
9594 * when in debug mode...
9596 check_insn(env
, ctx
, ISA_MIPS32
);
9597 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
9598 generate_exception(ctx
, EXCP_DBp
);
9600 generate_exception(ctx
, EXCP_DBp
);
9606 int imm
= ZIMM(ctx
->opcode
, 0, 5);
9608 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0);
9609 gen_arith_imm(env
, ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
9610 /* Let normal delay slot handling in our caller take us
9611 to the branch target. */
9615 generate_exception(ctx
, EXCP_RI
);
9620 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
9622 TCGv t0
= tcg_temp_new();
9623 TCGv t1
= tcg_temp_new();
9625 gen_load_gpr(t0
, base
);
9628 gen_load_gpr(t1
, index
);
9629 tcg_gen_shli_tl(t1
, t1
, 2);
9630 gen_op_addr_add(ctx
, t0
, t1
, t0
);
9633 save_cpu_state(ctx
, 0);
9634 op_ldst_lw(t1
, t0
, ctx
);
9635 gen_store_gpr(t1
, rd
);
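/* LWP/SWP (LDP/SDP on MIPS64): load or store a pair of consecutive GPRs,
   rd and rd+1, at base+offset. */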
9641 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
9642 int base
, int16_t offset
)
9644 const char *opn
= "ldst_pair";
9647 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31 || rd
== base
) {
9648 generate_exception(ctx
, EXCP_RI
);
9652 t0
= tcg_temp_new();
9653 t1
= tcg_temp_new();
9655 gen_base_offset_addr(ctx
, t0
, base
, offset
);
9659 save_cpu_state(ctx
, 0);
9660 op_ldst_lw(t1
, t0
, ctx
);
9661 gen_store_gpr(t1
, rd
);
9662 tcg_gen_movi_tl(t1
, 4);
9663 gen_op_addr_add(ctx
, t0
, t0
, t1
);
9664 op_ldst_lw(t1
, t0
, ctx
);
9665 gen_store_gpr(t1
, rd
+1);
9669 save_cpu_state(ctx
, 1);
9670 gen_load_gpr(t1
, rd
);
9671 op_ldst_sw(t1
, t0
, ctx
);
9672 tcg_gen_movi_tl(t1
, 4);
9673 gen_op_addr_add(ctx
, t0
, t0
, t1
);
9674 gen_load_gpr(t1
, rd
+1);
9675 op_ldst_sw(t1
, t0
, ctx
);
9678 #ifdef TARGET_MIPS64
9680 save_cpu_state(ctx
, 0);
9681 op_ldst_ld(t1
, t0
, ctx
);
9682 gen_store_gpr(t1
, rd
);
9683 tcg_gen_movi_tl(t1
, 8);
9684 gen_op_addr_add(ctx
, t0
, t0
, t1
);
9685 op_ldst_ld(t1
, t0
, ctx
);
9686 gen_store_gpr(t1
, rd
+1);
9690 save_cpu_state(ctx
, 1);
9691 gen_load_gpr(t1
, rd
);
9692 op_ldst_sd(t1
, t0
, ctx
);
9693 tcg_gen_movi_tl(t1
, 8);
9694 gen_op_addr_add(ctx
, t0
, t0
, t1
);
9695 gen_load_gpr(t1
, rd
+1);
9696 op_ldst_sd(t1
, t0
, ctx
);
9701 MIPS_DEBUG("%s, %s, %d(%s)", opn
, regnames
[rd
], offset
, regnames
[base
]);
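/* POOL32AXF: decode the minor opcode for traps, CP0 moves, sign-extension,
   count-leading ops, multiply/divide and accumulate, and similar. */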
static void gen_pool32axf (CPUState *env, DisasContext *ctx, int rt, int rs,

    int extension = (ctx->opcode >> 6) & 0x3f;
    int minor = (ctx->opcode >> 12) & 0xf;

    switch (extension) {
        mips32_op = OPC_TEQ;
        mips32_op = OPC_TGE;
        mips32_op = OPC_TGEU;
        mips32_op = OPC_TLT;
        mips32_op = OPC_TLTU;
        mips32_op = OPC_TNE;
        gen_trap(ctx, mips32_op, rs, rt, -1);
#ifndef CONFIG_USER_ONLY
        gen_mfc0(env, ctx, cpu_gpr[rt], rs, (ctx->opcode >> 11) & 0x7);
        TCGv t0 = tcg_temp_new();

        gen_load_gpr(t0, rt);
        gen_mtc0(env, ctx, t0, rs, (ctx->opcode >> 11) & 0x7);
        gen_bshfl(ctx, OPC_SEB, rs, rt);
        gen_bshfl(ctx, OPC_SEH, rs, rt);
        mips32_op = OPC_CLO;
        mips32_op = OPC_CLZ;
        check_insn(env, ctx, ISA_MIPS32);
        gen_cl(ctx, mips32_op, rt, rs);
        gen_rdhwr(env, ctx, rt, rs);
        gen_bshfl(ctx, OPC_WSBH, rs, rt);
        mips32_op = OPC_MULT;
        mips32_op = OPC_MULTU;
        mips32_op = OPC_DIV;
        mips32_op = OPC_DIVU;
        mips32_op = OPC_MADD;
        mips32_op = OPC_MADDU;
        mips32_op = OPC_MSUB;
        mips32_op = OPC_MSUBU;
        check_insn(env, ctx, ISA_MIPS32);
        gen_muldiv(ctx, mips32_op, rs, rt);
        goto pool32axf_invalid;
        generate_exception_err(ctx, EXCP_CpU, 2);
        goto pool32axf_invalid;
        gen_compute_branch (ctx, OPC_JALR, 4, rs, rt, 0);
        gen_compute_branch (ctx, OPC_JALRS, 4, rs, rt, 0);
        goto pool32axf_invalid;
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_load_srsgpr(rt, rs);
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_store_srsgpr(rt, rs);
        goto pool32axf_invalid;
#ifndef CONFIG_USER_ONLY
        mips32_op = OPC_TLBP;
        mips32_op = OPC_TLBR;
        mips32_op = OPC_TLBWI;
        mips32_op = OPC_TLBWR;
        mips32_op = OPC_WAIT;
        mips32_op = OPC_DERET;
        mips32_op = OPC_ERET;
        gen_cp0(env, ctx, mips32_op, rt, rs);
        goto pool32axf_invalid;
        TCGv t0 = tcg_temp_new();

        save_cpu_state(ctx, 1);
        gen_store_gpr(t0, rs);
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        TCGv t0 = tcg_temp_new();

        save_cpu_state(ctx, 1);
        gen_store_gpr(t0, rs);
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        goto pool32axf_invalid;
        generate_exception(ctx, EXCP_SYSCALL);
        ctx->bstate = BS_STOP;
        check_insn(env, ctx, ISA_MIPS32);
        if (!(ctx->hflags & MIPS_HFLAG_DM)) {
            generate_exception(ctx, EXCP_DBp);
            generate_exception(ctx, EXCP_DBp);
        goto pool32axf_invalid;
        gen_HILO(ctx, OPC_MFHI, rs);
        gen_HILO(ctx, OPC_MFLO, rs);
        gen_HILO(ctx, OPC_MTHI, rs);
        gen_HILO(ctx, OPC_MTLO, rs);
        goto pool32axf_invalid;
        MIPS_INVAL("pool32axf");
        generate_exception(ctx, EXCP_RI);
/* Values for microMIPS fmt field.  Variable-width, depending on which
   formats the instruction supports. */
static void gen_pool32fxf (CPUState *env, DisasContext *ctx, int rt, int rs)

    int extension = (ctx->opcode >> 6) & 0x3ff;

#define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
#define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
#define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
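/* Illustrative expansions of the helpers above (nothing beyond the macro
   arithmetic already visible, spelled out for reference):

       FLOAT_1BIT_FMT(SQRT_FMT, FMT_SD_D)    == (FMT_SD_D << 8)    | SQRT_FMT
       FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_PS)  == (FMT_SDPS_PS << 7) | ABS_FMT
       COND_FLOAT_MOV(MOVT, 3)               == (3 << 7)           | MOVT

   so each case label below is a (format, minor opcode) pair packed into
   the 10-bit extension field extracted above. */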
    switch (extension) {
    case FLOAT_1BIT_FMT(CFC1, 0):
        mips32_op = OPC_CFC1;
    case FLOAT_1BIT_FMT(CTC1, 0):
        mips32_op = OPC_CTC1;
    case FLOAT_1BIT_FMT(MFC1, 0):
        mips32_op = OPC_MFC1;
    case FLOAT_1BIT_FMT(MTC1, 0):
        mips32_op = OPC_MTC1;
    case FLOAT_1BIT_FMT(MFHC1, 0):
        mips32_op = OPC_MFHC1;
    case FLOAT_1BIT_FMT(MTHC1, 0):
        mips32_op = OPC_MTHC1;
        gen_cp1(ctx, mips32_op, rt, rs);

        /* Reciprocal square root */
    case FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_S):
        mips32_op = OPC_RSQRT_S;
    case FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_D):
        mips32_op = OPC_RSQRT_D;
    case FLOAT_1BIT_FMT(SQRT_FMT, FMT_SD_S):
        mips32_op = OPC_SQRT_S;
    case FLOAT_1BIT_FMT(SQRT_FMT, FMT_SD_D):
        mips32_op = OPC_SQRT_D;
    case FLOAT_1BIT_FMT(RECIP_FMT, FMT_SD_S):
        mips32_op = OPC_RECIP_S;
    case FLOAT_1BIT_FMT(RECIP_FMT, FMT_SD_D):
        mips32_op = OPC_RECIP_D;
    case FLOAT_1BIT_FMT(FLOOR_L, FMT_SD_S):
        mips32_op = OPC_FLOOR_L_S;
    case FLOAT_1BIT_FMT(FLOOR_L, FMT_SD_D):
        mips32_op = OPC_FLOOR_L_D;
    case FLOAT_1BIT_FMT(FLOOR_W, FMT_SD_S):
        mips32_op = OPC_FLOOR_W_S;
    case FLOAT_1BIT_FMT(FLOOR_W, FMT_SD_D):
        mips32_op = OPC_FLOOR_W_D;
    case FLOAT_1BIT_FMT(CEIL_L, FMT_SD_S):
        mips32_op = OPC_CEIL_L_S;
    case FLOAT_1BIT_FMT(CEIL_L, FMT_SD_D):
        mips32_op = OPC_CEIL_L_D;
    case FLOAT_1BIT_FMT(CEIL_W, FMT_SD_S):
        mips32_op = OPC_CEIL_W_S;
    case FLOAT_1BIT_FMT(CEIL_W, FMT_SD_D):
        mips32_op = OPC_CEIL_W_D;
    case FLOAT_1BIT_FMT(TRUNC_L, FMT_SD_S):
        mips32_op = OPC_TRUNC_L_S;
    case FLOAT_1BIT_FMT(TRUNC_L, FMT_SD_D):
        mips32_op = OPC_TRUNC_L_D;
    case FLOAT_1BIT_FMT(TRUNC_W, FMT_SD_S):
        mips32_op = OPC_TRUNC_W_S;
    case FLOAT_1BIT_FMT(TRUNC_W, FMT_SD_D):
        mips32_op = OPC_TRUNC_W_D;
    case FLOAT_1BIT_FMT(ROUND_L, FMT_SD_S):
        mips32_op = OPC_ROUND_L_S;
    case FLOAT_1BIT_FMT(ROUND_L, FMT_SD_D):
        mips32_op = OPC_ROUND_L_D;
    case FLOAT_1BIT_FMT(ROUND_W, FMT_SD_S):
        mips32_op = OPC_ROUND_W_S;
    case FLOAT_1BIT_FMT(ROUND_W, FMT_SD_D):
        mips32_op = OPC_ROUND_W_D;

        /* Integer to floating-point conversion */
    case FLOAT_1BIT_FMT(CVT_L, FMT_SD_S):
        mips32_op = OPC_CVT_L_S;
    case FLOAT_1BIT_FMT(CVT_L, FMT_SD_D):
        mips32_op = OPC_CVT_L_D;
    case FLOAT_1BIT_FMT(CVT_W, FMT_SD_S):
        mips32_op = OPC_CVT_W_S;
    case FLOAT_1BIT_FMT(CVT_W, FMT_SD_D):
        mips32_op = OPC_CVT_W_D;

        /* Paired-foo conversions */
    case FLOAT_1BIT_FMT(CVT_S_PL, 0):
        mips32_op = OPC_CVT_S_PL;
    case FLOAT_1BIT_FMT(CVT_S_PU, 0):
        mips32_op = OPC_CVT_S_PU;
    case FLOAT_1BIT_FMT(CVT_PW_PS, 0):
        mips32_op = OPC_CVT_PW_PS;
    case FLOAT_1BIT_FMT(CVT_PS_PW, 0):
        mips32_op = OPC_CVT_PS_PW;

        /* Floating-point moves */
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_S):
        mips32_op = OPC_MOV_S;
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_D):
        mips32_op = OPC_MOV_D;
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_PS):
        mips32_op = OPC_MOV_PS;

        /* Absolute value */
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_S):
        mips32_op = OPC_ABS_S;
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_D):
        mips32_op = OPC_ABS_D;
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_PS):
        mips32_op = OPC_ABS_PS;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_S):
        mips32_op = OPC_NEG_S;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_D):
        mips32_op = OPC_NEG_D;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_PS):
        mips32_op = OPC_NEG_PS;

        /* Reciprocal square root step */
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_S):
        mips32_op = OPC_RSQRT1_S;
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_D):
        mips32_op = OPC_RSQRT1_D;
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_PS):
        mips32_op = OPC_RSQRT1_PS;

        /* Reciprocal step */
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_S):
        mips32_op = OPC_RECIP1_S;
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_D):
        mips32_op = OPC_RECIP1_D;
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_PS):
        mips32_op = OPC_RECIP1_PS;

        /* Conversions from double */
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_S):
        mips32_op = OPC_CVT_D_S;
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_W):
        mips32_op = OPC_CVT_D_W;
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_L):
        mips32_op = OPC_CVT_D_L;

        /* Conversions from single */
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_D):
        mips32_op = OPC_CVT_S_D;
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_W):
        mips32_op = OPC_CVT_S_W;
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_L):
        mips32_op = OPC_CVT_S_L;
        gen_farith(ctx, mips32_op, -1, rs, rt, 0);

        /* Conditional moves on floating-point codes */
    case COND_FLOAT_MOV(MOVT, 0):
    case COND_FLOAT_MOV(MOVT, 1):
    case COND_FLOAT_MOV(MOVT, 2):
    case COND_FLOAT_MOV(MOVT, 3):
    case COND_FLOAT_MOV(MOVT, 4):
    case COND_FLOAT_MOV(MOVT, 5):
    case COND_FLOAT_MOV(MOVT, 6):
    case COND_FLOAT_MOV(MOVT, 7):
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 1);
    case COND_FLOAT_MOV(MOVF, 0):
    case COND_FLOAT_MOV(MOVF, 1):
    case COND_FLOAT_MOV(MOVF, 2):
    case COND_FLOAT_MOV(MOVF, 3):
    case COND_FLOAT_MOV(MOVF, 4):
    case COND_FLOAT_MOV(MOVF, 5):
    case COND_FLOAT_MOV(MOVF, 6):
    case COND_FLOAT_MOV(MOVF, 7):
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 0);
        MIPS_INVAL("pool32fxf");
        generate_exception(ctx, EXCP_RI);
static void decode_micromips32_opc (CPUState *env, DisasContext *ctx,
                                    uint16_t insn_hw1, int *is_branch)

    int rt, rs, rd, rr;
    uint32_t op, minor, mips32_op;
    uint32_t cond, fmt, cc;

    insn = lduw_code(ctx->pc + 2);
    ctx->opcode = (ctx->opcode << 16) | insn;

    rt = (ctx->opcode >> 21) & 0x1f;
    rs = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    rr = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t) ctx->opcode;

    op = (ctx->opcode >> 26) & 0x3f;
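    /* Field layout assumed by the shifts above (a reference sketch, not
       additional decoding): once the second halfword has been appended,
       ctx->opcode holds the full 32-bit microMIPS instruction with

           op = opcode[31:26]   rt = opcode[25:21]   rs = opcode[20:16]
           rd = opcode[15:11]   rr = opcode[10:6]    imm = opcode[15:0]

       where imm is interpreted as a signed 16-bit value. */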
        minor = ctx->opcode & 0x3f;
        minor = (ctx->opcode >> 6) & 0xf;
            mips32_op = OPC_SLL;
            mips32_op = OPC_SRA;
            mips32_op = OPC_SRL;
            mips32_op = OPC_ROTR;
            gen_shift_imm(env, ctx, mips32_op, rt, rs, rd);
            goto pool32a_invalid;
        minor = (ctx->opcode >> 6) & 0xf;
            mips32_op = OPC_ADD;
            mips32_op = OPC_ADDU;
            mips32_op = OPC_SUB;
            mips32_op = OPC_SUBU;
            mips32_op = OPC_MUL;
            gen_arith(env, ctx, mips32_op, rd, rs, rt);
            mips32_op = OPC_SLLV;
            mips32_op = OPC_SRLV;
            mips32_op = OPC_SRAV;
            mips32_op = OPC_ROTRV;
            gen_shift(env, ctx, mips32_op, rd, rs, rt);
            /* Logical operations */
            mips32_op = OPC_AND;
            mips32_op = OPC_OR;
            mips32_op = OPC_NOR;
            mips32_op = OPC_XOR;
            gen_logic(env, mips32_op, rd, rs, rt);
            /* Set less than */
            mips32_op = OPC_SLT;
            mips32_op = OPC_SLTU;
            gen_slt(env, mips32_op, rd, rs, rt);
            goto pool32a_invalid;
        minor = (ctx->opcode >> 6) & 0xf;
            /* Conditional moves */
            mips32_op = OPC_MOVN;
            mips32_op = OPC_MOVZ;
            gen_cond_move(env, mips32_op, rd, rs, rt);
            gen_ldxs(ctx, rs, rt, rd);
            goto pool32a_invalid;
        gen_bitops(ctx, OPC_INS, rt, rs, rr, rd);
        gen_bitops(ctx, OPC_EXT, rt, rs, rr, rd);
        gen_pool32axf(env, ctx, rt, rs, is_branch);
        generate_exception(ctx, EXCP_BREAK);
        MIPS_INVAL("pool32a");
        generate_exception(ctx, EXCP_RI);
        minor = (ctx->opcode >> 12) & 0xf;
            /* Treat as no-op. */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
#ifdef TARGET_MIPS64
            gen_ldst_pair(ctx, minor, rt, rs, SIMM(ctx->opcode, 0, 12));
#ifdef TARGET_MIPS64
            gen_ldst_multiple(ctx, minor, rt, rs, SIMM(ctx->opcode, 0, 12));
            MIPS_INVAL("pool32b");
            generate_exception(ctx, EXCP_RI);
        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            minor = ctx->opcode & 0x3f;
            check_cp1_enabled(ctx);
                mips32_op = OPC_ALNV_PS;
                mips32_op = OPC_MADD_S;
                mips32_op = OPC_MADD_D;
                mips32_op = OPC_MADD_PS;
                mips32_op = OPC_MSUB_S;
                mips32_op = OPC_MSUB_D;
                mips32_op = OPC_MSUB_PS;
                mips32_op = OPC_NMADD_S;
                mips32_op = OPC_NMADD_D;
                mips32_op = OPC_NMADD_PS;
                mips32_op = OPC_NMSUB_S;
                mips32_op = OPC_NMSUB_D;
                mips32_op = OPC_NMSUB_PS;
                gen_flt3_arith(ctx, mips32_op, rd, rr, rs, rt);
            case CABS_COND_FMT:
                cond = (ctx->opcode >> 6) & 0xf;
                cc = (ctx->opcode >> 13) & 0x7;
                fmt = (ctx->opcode >> 10) & 0x3;
                    gen_cmpabs_s(ctx, cond, rt, rs, cc);
                    gen_cmpabs_d(ctx, cond, rt, rs, cc);
                    gen_cmpabs_ps(ctx, cond, rt, rs, cc);
                    goto pool32f_invalid;
                cond = (ctx->opcode >> 6) & 0xf;
                cc = (ctx->opcode >> 13) & 0x7;
                fmt = (ctx->opcode >> 10) & 0x3;
                    gen_cmp_s(ctx, cond, rt, rs, cc);
                    gen_cmp_d(ctx, cond, rt, rs, cc);
                    gen_cmp_ps(ctx, cond, rt, rs, cc);
                    goto pool32f_invalid;
                gen_pool32fxf(env, ctx, rt, rs);
                switch ((ctx->opcode >> 6) & 0x7) {
                    mips32_op = OPC_PLL_PS;
                    mips32_op = OPC_PLU_PS;
                    mips32_op = OPC_PUL_PS;
                    mips32_op = OPC_PUU_PS;
                    mips32_op = OPC_CVT_PS_S;
                    gen_farith(ctx, mips32_op, rt, rs, rd, 0);
                    goto pool32f_invalid;
                switch ((ctx->opcode >> 6) & 0x7) {
                    mips32_op = OPC_LWXC1;
                    mips32_op = OPC_SWXC1;
                    mips32_op = OPC_LDXC1;
                    mips32_op = OPC_SDXC1;
                    mips32_op = OPC_LUXC1;
                    mips32_op = OPC_SUXC1;
                    gen_flt3_ldst(ctx, mips32_op, rd, rd, rt, rs);
                    goto pool32f_invalid;
                fmt = (ctx->opcode >> 9) & 0x3;
                switch ((ctx->opcode >> 6) & 0x7) {
                        mips32_op = OPC_RSQRT2_S;
                        mips32_op = OPC_RSQRT2_D;
                        mips32_op = OPC_RSQRT2_PS;
                        goto pool32f_invalid;
                        mips32_op = OPC_RECIP2_S;
                        mips32_op = OPC_RECIP2_D;
                        mips32_op = OPC_RECIP2_PS;
                        goto pool32f_invalid;
                    mips32_op = OPC_ADDR_PS;
                    mips32_op = OPC_MULR_PS;
                    gen_farith(ctx, mips32_op, rt, rs, rd, 0);
                    goto pool32f_invalid;
                /* MOV[FT].fmt and PREFX */
                cc = (ctx->opcode >> 13) & 0x7;
                fmt = (ctx->opcode >> 9) & 0x3;
                switch ((ctx->opcode >> 6) & 0x7) {
                        gen_movcf_s(rs, rt, cc, 0);
                        gen_movcf_d(ctx, rs, rt, cc, 0);
                        gen_movcf_ps(rs, rt, cc, 0);
                        goto pool32f_invalid;
                        gen_movcf_s(rs, rt, cc, 1);
                        gen_movcf_d(ctx, rs, rt, cc, 1);
                        gen_movcf_ps(rs, rt, cc, 1);
                        goto pool32f_invalid;
                    goto pool32f_invalid;
#define FINSN_3ARG_SDPS(prfx)                           \
    switch ((ctx->opcode >> 8) & 0x3) {                 \
    case FMT_SDPS_S:                                    \
        mips32_op = OPC_##prfx##_S;                     \
        break;                                          \
    case FMT_SDPS_D:                                    \
        mips32_op = OPC_##prfx##_D;                     \
        break;                                          \
    case FMT_SDPS_PS:                                   \
        mips32_op = OPC_##prfx##_PS;                    \
        break;                                          \
    default:                                            \
        goto pool32f_invalid;                           \
    }
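/* For reference, FINSN_3ARG_SDPS(ADD) expands (roughly) to

       switch ((ctx->opcode >> 8) & 0x3) {
       case FMT_SDPS_S:  mips32_op = OPC_ADD_S;  break;
       case FMT_SDPS_D:  mips32_op = OPC_ADD_D;  break;
       case FMT_SDPS_PS: mips32_op = OPC_ADD_PS; break;
       default: goto pool32f_invalid;
       }

   i.e. one macro use selects the S/D/PS variant of the named operation
   from the 2-bit format field. */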
                /* regular FP ops */
                switch ((ctx->opcode >> 6) & 0x3) {
                    FINSN_3ARG_SDPS(ADD);
                    FINSN_3ARG_SDPS(SUB);
                    FINSN_3ARG_SDPS(MUL);
                    fmt = (ctx->opcode >> 8) & 0x3;
                        mips32_op = OPC_DIV_D;
                    } else if (fmt == 0) {
                        mips32_op = OPC_DIV_S;
                        goto pool32f_invalid;
                    goto pool32f_invalid;
                switch ((ctx->opcode >> 6) & 0x3) {
                    FINSN_3ARG_SDPS(MOVN);
                    FINSN_3ARG_SDPS(MOVZ);
                    goto pool32f_invalid;
                gen_farith(ctx, mips32_op, rt, rs, rd, 0);
            MIPS_INVAL("pool32f");
            generate_exception(ctx, EXCP_RI);
        generate_exception_err(ctx, EXCP_CpU, 1);
        minor = (ctx->opcode >> 21) & 0x1f;
            mips32_op = OPC_BLTZ;
            mips32_op = OPC_BLTZAL;
            mips32_op = OPC_BLTZALS;
            mips32_op = OPC_BGEZ;
            mips32_op = OPC_BGEZAL;
            mips32_op = OPC_BGEZALS;
            mips32_op = OPC_BLEZ;
            mips32_op = OPC_BGTZ;
            gen_compute_branch(ctx, mips32_op, 4, rs, -1, imm << 1);
            mips32_op = OPC_TLTI;
            mips32_op = OPC_TGEI;
            mips32_op = OPC_TLTIU;
            mips32_op = OPC_TGEIU;
            mips32_op = OPC_TNEI;
            mips32_op = OPC_TEQI;
            gen_trap(ctx, mips32_op, rs, -1, imm);
            gen_compute_branch(ctx, minor == BNEZC ? OPC_BNE : OPC_BEQ,
                               4, rs, 0, imm << 1);
            /* Compact branches don't have a delay slot, so just let
               the normal delay slot handling take us to the branch
               target. */
            gen_logic_imm(env, OPC_LUI, rs, -1, imm);
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
            mips32_op = (ctx->opcode & (1 << 16)) ? OPC_BC1FANY2 : OPC_BC1F;
            mips32_op = (ctx->opcode & (1 << 16)) ? OPC_BC1TANY2 : OPC_BC1T;
            mips32_op = OPC_BC1FANY4;
            mips32_op = OPC_BC1TANY4;
            check_insn(env, ctx, ASE_MIPS3D);
            gen_compute_branch1(env, ctx, mips32_op,
                                (ctx->opcode >> 18) & 0x7, imm << 1);
            /* MIPS DSP: not implemented */
            MIPS_INVAL("pool32i");
            generate_exception(ctx, EXCP_RI);
        minor = (ctx->opcode >> 12) & 0xf;
            mips32_op = OPC_LWL;
            mips32_op = OPC_SWL;
            mips32_op = OPC_LWR;
            mips32_op = OPC_SWR;
#if defined(TARGET_MIPS64)
            mips32_op = OPC_LDL;
            mips32_op = OPC_SDL;
            mips32_op = OPC_LDR;
            mips32_op = OPC_SDR;
            mips32_op = OPC_LWU;
            mips32_op = OPC_LLD;
            mips32_op = OPC_LL;
            gen_ldst(ctx, mips32_op, rt, rs, SIMM(ctx->opcode, 0, 12));
            gen_st_cond(ctx, OPC_SC, rt, rs, SIMM(ctx->opcode, 0, 12));
#if defined(TARGET_MIPS64)
            gen_st_cond(ctx, OPC_SCD, rt, rs, SIMM(ctx->opcode, 0, 12));
            /* Treat as no-op */
            MIPS_INVAL("pool32c");
            generate_exception(ctx, EXCP_RI);
        mips32_op = OPC_ADDI;
        mips32_op = OPC_ADDIU;
        gen_arith_imm(env, ctx, mips32_op, rt, rs, imm);
        /* Logical operations */
        mips32_op = OPC_ORI;
        mips32_op = OPC_XORI;
        mips32_op = OPC_ANDI;
        gen_logic_imm(env, mips32_op, rt, rs, imm);
        /* Set less than immediate */
        mips32_op = OPC_SLTI;
        mips32_op = OPC_SLTIU;
        gen_slt_imm(env, mips32_op, rt, rs, imm);
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, OPC_JALX, 4, rt, rs, offset);
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 1;
        gen_compute_branch(ctx, OPC_JALS, 4, rt, rs, offset);
        gen_compute_branch(ctx, OPC_BEQ, 4, rt, rs, imm << 1);
        gen_compute_branch(ctx, OPC_BNE, 4, rt, rs, imm << 1);
        gen_compute_branch(ctx, OPC_J, 4, rt, rs,
                           (int32_t)(ctx->opcode & 0x3FFFFFF) << 1);
        gen_compute_branch(ctx, OPC_JAL, 4, rt, rs,
                           (int32_t)(ctx->opcode & 0x3FFFFFF) << 1);
        /* Floating point (COP1) */
        mips32_op = OPC_LWC1;
        mips32_op = OPC_LDC1;
        mips32_op = OPC_SWC1;
        mips32_op = OPC_SDC1;
        gen_cop1_ldst(env, ctx, mips32_op, rt, rs, imm);
        int reg = mmreg(ZIMM(ctx->opcode, 23, 3));
        int offset = SIMM(ctx->opcode, 0, 23) << 2;

        gen_addiupc(ctx, reg, offset, 0, 0);
        /* Loads and stores */
        mips32_op = OPC_LB;
        mips32_op = OPC_LBU;
        mips32_op = OPC_LH;
        mips32_op = OPC_LHU;
        mips32_op = OPC_LW;
#ifdef TARGET_MIPS64
        mips32_op = OPC_LD;
        mips32_op = OPC_SD;
        mips32_op = OPC_SB;
        mips32_op = OPC_SH;
        mips32_op = OPC_SW;
        gen_ldst(ctx, mips32_op, rt, rs, imm);
        generate_exception(ctx, EXCP_RI);
static int decode_micromips_opc (CPUState *env, DisasContext *ctx, int *is_branch)

    /* make sure instructions are on a halfword boundary */
    if (ctx->pc & 0x1) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception(ctx, EXCP_AdEL);
        ctx->bstate = BS_STOP;

    op = (ctx->opcode >> 10) & 0x3f;
    /* Enforce properly-sized instructions in a delay slot */
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int bits = ctx->hflags & MIPS_HFLAG_BMASK_EXT;
        case POOL48A:           /* ??? */
            if (bits & MIPS_HFLAG_BDS16) {
                generate_exception(ctx, EXCP_RI);
                /* Just stop translation; the user is confused. */
                ctx->bstate = BS_STOP;
            if (bits & MIPS_HFLAG_BDS32) {
                generate_exception(ctx, EXCP_RI);
                /* Just stop translation; the user is confused. */
                ctx->bstate = BS_STOP;

        int rd = mmreg(uMIPS_RD(ctx->opcode));
        int rs1 = mmreg(uMIPS_RS1(ctx->opcode));
        int rs2 = mmreg(uMIPS_RS2(ctx->opcode));

        switch (ctx->opcode & 0x1) {
        gen_arith(env, ctx, opc, rd, rs1, rs2);

        int rd = mmreg(uMIPS_RD(ctx->opcode));
        int rs = mmreg(uMIPS_RS(ctx->opcode));
        int amount = (ctx->opcode >> 1) & 0x7;

        amount = amount == 0 ? 8 : amount;
        switch (ctx->opcode & 0x1) {
        gen_shift_imm(env, ctx, opc, rd, rs, amount);

        gen_pool16c_insn(env, ctx, is_branch);

        int rd = mmreg(uMIPS_RD(ctx->opcode));
        int rb = 28;            /* GP */
        int16_t offset = SIMM(ctx->opcode, 0, 7) << 2;

        gen_ldst(ctx, OPC_LW, rd, rb, offset);

        if (ctx->opcode & 1) {
            generate_exception(ctx, EXCP_RI);
        int enc_dest = uMIPS_RD(ctx->opcode);
        int enc_rt = uMIPS_RS2(ctx->opcode);
        int enc_rs = uMIPS_RS1(ctx->opcode);
        int rd, rs, re, rt;
        static const int rd_enc[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
        static const int re_enc[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
        static const int rs_rt_enc[] = { 0, 17, 2, 3, 16, 18, 19, 20 };

        rd = rd_enc[enc_dest];
        re = re_enc[enc_dest];
        rs = rs_rt_enc[enc_rs];
        rt = rs_rt_enc[enc_rt];

        gen_arith_imm(env, ctx, OPC_ADDIU, rd, rs, 0);
        gen_arith_imm(env, ctx, OPC_ADDIU, re, rt, 0);
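        /* The three lookup tables above map the 3-bit microMIPS
           register-pair encodings onto architectural GPR numbers; e.g.
           enc_dest == 0 selects the destination pair ($5, $6), and the
           two ADDIU-with-zero operations above implement the
           corresponding register-to-register copies. */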
        int rd = mmreg(uMIPS_RD(ctx->opcode));
        int rb = mmreg(uMIPS_RS(ctx->opcode));
        int16_t offset = ZIMM(ctx->opcode, 0, 4);

        offset = (offset == 0xf ? -1 : offset);
        gen_ldst(ctx, OPC_LBU, rd, rb, offset);

        int rd = mmreg(uMIPS_RD(ctx->opcode));
        int rb = mmreg(uMIPS_RS(ctx->opcode));
        int16_t offset = ZIMM(ctx->opcode, 0, 4) << 1;

        gen_ldst(ctx, OPC_LHU, rd, rb, offset);

        int rd = (ctx->opcode >> 5) & 0x1f;
        int rb = 29;            /* SP */
        int16_t offset = ZIMM(ctx->opcode, 0, 5) << 2;

        gen_ldst(ctx, OPC_LW, rd, rb, offset);

        int rd = mmreg(uMIPS_RD(ctx->opcode));
        int rb = mmreg(uMIPS_RS(ctx->opcode));
        int16_t offset = ZIMM(ctx->opcode, 0, 4) << 2;

        gen_ldst(ctx, OPC_LW, rd, rb, offset);

        int rd = mmreg2(uMIPS_RD(ctx->opcode));
        int rb = mmreg(uMIPS_RS(ctx->opcode));
        int16_t offset = ZIMM(ctx->opcode, 0, 4);

        gen_ldst(ctx, OPC_SB, rd, rb, offset);

        int rd = mmreg2(uMIPS_RD(ctx->opcode));
        int rb = mmreg(uMIPS_RS(ctx->opcode));
        int16_t offset = ZIMM(ctx->opcode, 0, 4) << 1;

        gen_ldst(ctx, OPC_SH, rd, rb, offset);

        int rd = (ctx->opcode >> 5) & 0x1f;
        int rb = 29;            /* SP */
        int16_t offset = ZIMM(ctx->opcode, 0, 5) << 2;

        gen_ldst(ctx, OPC_SW, rd, rb, offset);

        int rd = mmreg2(uMIPS_RD(ctx->opcode));
        int rb = mmreg(uMIPS_RS(ctx->opcode));
        int16_t offset = ZIMM(ctx->opcode, 0, 4) << 2;

        gen_ldst(ctx, OPC_SW, rd, rb, offset);

        int rd = uMIPS_RD5(ctx->opcode);
        int rs = uMIPS_RS5(ctx->opcode);

        gen_arith_imm(env, ctx, OPC_ADDIU, rd, rs, 0);

        gen_andi16(env, ctx);
        switch (ctx->opcode & 0x1) {
            gen_addius5(env, ctx);
            gen_addiusp(env, ctx);
        switch (ctx->opcode & 0x1) {
            gen_addiur2(env, ctx);
            gen_addiur1sp(env, ctx);
        gen_compute_branch(ctx, OPC_BEQ, 2, 0, 0,
                           SIMM(ctx->opcode, 0, 10) << 1);
        gen_compute_branch(ctx, op == BNEZ16 ? OPC_BNE : OPC_BEQ, 2,
                           mmreg(uMIPS_RD(ctx->opcode)),
                           0, SIMM(ctx->opcode, 0, 7) << 1);

        int reg = mmreg(uMIPS_RD(ctx->opcode));
        int imm = ZIMM(ctx->opcode, 0, 7);

        imm = (imm == 0x7f ? -1 : imm);
        tcg_gen_movi_tl(cpu_gpr[reg], imm);

        generate_exception(ctx, EXCP_RI);
        decode_micromips32_opc (env, ctx, op, is_branch);
/* SmartMIPS extension to MIPS32 */

#if defined(TARGET_MIPS64)

/* MDMX extension to MIPS64 */
static void decode_opc (CPUState *env, DisasContext *ctx, int *is_branch)

    int rs, rt, rd, sa;
    uint32_t op, op1, op2;

    /* make sure instructions are on a word boundary */
    if (ctx->pc & 0x3) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception(ctx, EXCP_AdEL);

    /* Handle blikely not taken case */
    if ((ctx->hflags & MIPS_HFLAG_BMASK_BASE) == MIPS_HFLAG_BL) {
        int l1 = gen_new_label();

        MIPS_DEBUG("blikely condition (" TARGET_FMT_lx ")", ctx->pc + 4);
        tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
        tcg_gen_movi_i32(hflags, ctx->hflags & ~MIPS_HFLAG_BMASK);
        gen_goto_tb(ctx, 1, ctx->pc + 4);

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(ctx->pc);

    op = MASK_OP_MAJOR(ctx->opcode);
    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode;
        op1 = MASK_SPECIAL(ctx->opcode);
        case OPC_SLL:          /* Shift with immediate */
            gen_shift_imm(env, ctx, op1, rd, rt, sa);
            switch ((ctx->opcode >> 21) & 0x1f) {
                /* rotr is decoded as srl on non-R2 CPUs */
                if (env->insn_flags & ISA_MIPS32R2) {
                gen_shift_imm(env, ctx, op1, rd, rt, sa);
                generate_exception(ctx, EXCP_RI);
        case OPC_MOVN:         /* Conditional move */
            check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32 |
                       INSN_LOONGSON2E | INSN_LOONGSON2F);
            gen_cond_move(env, op1, rd, rs, rt);
        case OPC_ADD ... OPC_SUBU:
            gen_arith(env, ctx, op1, rd, rs, rt);
        case OPC_SLLV:         /* Shifts */
            gen_shift(env, ctx, op1, rd, rs, rt);
            switch ((ctx->opcode >> 6) & 0x1f) {
                /* rotrv is decoded as srlv on non-R2 CPUs */
                if (env->insn_flags & ISA_MIPS32R2) {
                gen_shift(env, ctx, op1, rd, rs, rt);
                generate_exception(ctx, EXCP_RI);
        case OPC_SLT:          /* Set on less than */
            gen_slt(env, op1, rd, rs, rt);
        case OPC_AND:          /* Logic*/
            gen_logic(env, op1, rd, rs, rt);
        case OPC_MULT ... OPC_DIVU:
                check_insn(env, ctx, INSN_VR54XX);
                op1 = MASK_MUL_VR54XX(ctx->opcode);
                gen_mul_vr54xx(ctx, op1, rd, rs, rt);
                gen_muldiv(ctx, op1, rs, rt);
        case OPC_JR ... OPC_JALR:
            gen_compute_branch(ctx, op1, 4, rs, rd, sa);
        case OPC_TGE ... OPC_TEQ: /* Traps */
            gen_trap(ctx, op1, rs, rt, -1);
        case OPC_MFHI:         /* Move from HI/LO */
            gen_HILO(ctx, op1, rd);
        case OPC_MTLO:         /* Move to HI/LO */
            gen_HILO(ctx, op1, rs);
        case OPC_PMON:         /* Pmon entry point, also R4010 selsl */
#ifdef MIPS_STRICT_STANDARD
            MIPS_INVAL("PMON / selsl");
            generate_exception(ctx, EXCP_RI);
            gen_helper_0i(pmon, sa);
            generate_exception(ctx, EXCP_SYSCALL);
            ctx->bstate = BS_STOP;
            generate_exception(ctx, EXCP_BREAK);
#ifdef MIPS_STRICT_STANDARD
            MIPS_INVAL("SPIM");
            generate_exception(ctx, EXCP_RI);
            /* Implemented as RI exception for now. */
            MIPS_INVAL("spim (unofficial)");
            generate_exception(ctx, EXCP_RI);
            /* Treat as NOP. */
            check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
            if (env->CP0_Config1 & (1 << CP0C1_FP)) {
                check_cp1_enabled(ctx);
                gen_movci(ctx, rd, rs, (ctx->opcode >> 18) & 0x7,
                          (ctx->opcode >> 16) & 1);
                generate_exception_err(ctx, EXCP_CpU, 1);
#if defined(TARGET_MIPS64)
        /* MIPS64 specific opcodes */
            check_insn(env, ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift_imm(env, ctx, op1, rd, rt, sa);
            switch ((ctx->opcode >> 21) & 0x1f) {
                /* drotr is decoded as dsrl on non-R2 CPUs */
                if (env->insn_flags & ISA_MIPS32R2) {
                check_insn(env, ctx, ISA_MIPS3);
                check_mips_64(ctx);
                gen_shift_imm(env, ctx, op1, rd, rt, sa);
                generate_exception(ctx, EXCP_RI);
            switch ((ctx->opcode >> 21) & 0x1f) {
                /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
                if (env->insn_flags & ISA_MIPS32R2) {
                check_insn(env, ctx, ISA_MIPS3);
                check_mips_64(ctx);
                gen_shift_imm(env, ctx, op1, rd, rt, sa);
                generate_exception(ctx, EXCP_RI);
        case OPC_DADD ... OPC_DSUBU:
            check_insn(env, ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_arith(env, ctx, op1, rd, rs, rt);
            check_insn(env, ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_shift(env, ctx, op1, rd, rs, rt);
            switch ((ctx->opcode >> 6) & 0x1f) {
                /* drotrv is decoded as dsrlv on non-R2 CPUs */
                if (env->insn_flags & ISA_MIPS32R2) {
                check_insn(env, ctx, ISA_MIPS3);
                check_mips_64(ctx);
                gen_shift(env, ctx, op1, rd, rs, rt);
                generate_exception(ctx, EXCP_RI);
        case OPC_DMULT ... OPC_DDIVU:
            check_insn(env, ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_muldiv(ctx, op1, rs, rt);
        default:            /* Invalid */
            MIPS_INVAL("special");
            generate_exception(ctx, EXCP_RI);
        op1 = MASK_SPECIAL2(ctx->opcode);
        case OPC_MADD ... OPC_MADDU: /* Multiply and add/sub */
        case OPC_MSUB ... OPC_MSUBU:
            check_insn(env, ctx, ISA_MIPS32);
            gen_muldiv(ctx, op1, rs, rt);
            gen_arith(env, ctx, op1, rd, rs, rt);
            check_insn(env, ctx, ISA_MIPS32);
            gen_cl(ctx, op1, rd, rs);
            /* XXX: not clear which exception should be raised
             *      when in debug mode...
             */
            check_insn(env, ctx, ISA_MIPS32);
            if (!(ctx->hflags & MIPS_HFLAG_DM)) {
                generate_exception(ctx, EXCP_DBp);
                generate_exception(ctx, EXCP_DBp);
            /* Treat as NOP. */
#if defined(TARGET_MIPS64)
            check_insn(env, ctx, ISA_MIPS64);
            check_mips_64(ctx);
            gen_cl(ctx, op1, rd, rs);
        default:            /* Invalid */
            MIPS_INVAL("special2");
            generate_exception(ctx, EXCP_RI);
        op1 = MASK_SPECIAL3(ctx->opcode);
            check_insn(env, ctx, ISA_MIPS32R2);
            gen_bitops(ctx, op1, rt, rs, sa, rd);
            check_insn(env, ctx, ISA_MIPS32R2);
            op2 = MASK_BSHFL(ctx->opcode);
            gen_bshfl(ctx, op2, rt, rd);
            gen_rdhwr(env, ctx, rt, rd);
            check_insn(env, ctx, ASE_MT);
            TCGv t0 = tcg_temp_new();
            TCGv t1 = tcg_temp_new();

            gen_load_gpr(t0, rt);
            gen_load_gpr(t1, rs);
            gen_helper_fork(t0, t1);
            check_insn(env, ctx, ASE_MT);
            TCGv t0 = tcg_temp_new();

            save_cpu_state(ctx, 1);
            gen_load_gpr(t0, rs);
            gen_helper_yield(t0, t0);
            gen_store_gpr(t0, rd);
#if defined(TARGET_MIPS64)
        case OPC_DEXTM ... OPC_DEXT:
        case OPC_DINSM ... OPC_DINS:
            check_insn(env, ctx, ISA_MIPS64R2);
            check_mips_64(ctx);
            gen_bitops(ctx, op1, rt, rs, sa, rd);
            check_insn(env, ctx, ISA_MIPS64R2);
            check_mips_64(ctx);
            op2 = MASK_DBSHFL(ctx->opcode);
            gen_bshfl(ctx, op2, rt, rd);
        default:            /* Invalid */
            MIPS_INVAL("special3");
            generate_exception(ctx, EXCP_RI);
        op1 = MASK_REGIMM(ctx->opcode);
        case OPC_BLTZ ... OPC_BGEZL: /* REGIMM branches */
        case OPC_BLTZAL ... OPC_BGEZALL:
            gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2);
        case OPC_TGEI ... OPC_TEQI: /* REGIMM traps */
            gen_trap(ctx, op1, rs, -1, imm);
            check_insn(env, ctx, ISA_MIPS32R2);
            /* Treat as NOP. */
        default:            /* Invalid */
            MIPS_INVAL("regimm");
            generate_exception(ctx, EXCP_RI);
        check_cp0_enabled(ctx);
        op1 = MASK_CP0(ctx->opcode);
#if defined(TARGET_MIPS64)
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, op1, rt, rd);
#endif /* !CONFIG_USER_ONLY */
        case OPC_C0_FIRST ... OPC_C0_LAST:
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, MASK_C0(ctx->opcode), rt, rd);
#endif /* !CONFIG_USER_ONLY */
#ifndef CONFIG_USER_ONLY
            TCGv t0 = tcg_temp_new();

            op2 = MASK_MFMC0(ctx->opcode);
                check_insn(env, ctx, ASE_MT);
                gen_helper_dmt(t0, t0);
                gen_store_gpr(t0, rt);
                check_insn(env, ctx, ASE_MT);
                gen_helper_emt(t0, t0);
                gen_store_gpr(t0, rt);
                check_insn(env, ctx, ASE_MT);
                gen_helper_dvpe(t0, t0);
                gen_store_gpr(t0, rt);
                check_insn(env, ctx, ASE_MT);
                gen_helper_evpe(t0, t0);
                gen_store_gpr(t0, rt);
                check_insn(env, ctx, ISA_MIPS32R2);
                save_cpu_state(ctx, 1);
                gen_store_gpr(t0, rt);
                /* Stop translation as we may have switched the execution mode */
                ctx->bstate = BS_STOP;
                check_insn(env, ctx, ISA_MIPS32R2);
                save_cpu_state(ctx, 1);
                gen_store_gpr(t0, rt);
                /* Stop translation as we may have switched the execution mode */
                ctx->bstate = BS_STOP;
            default:            /* Invalid */
                MIPS_INVAL("mfmc0");
                generate_exception(ctx, EXCP_RI);
#endif /* !CONFIG_USER_ONLY */
            check_insn(env, ctx, ISA_MIPS32R2);
            gen_load_srsgpr(rt, rd);
            check_insn(env, ctx, ISA_MIPS32R2);
            gen_store_srsgpr(rt, rd);
            generate_exception(ctx, EXCP_RI);
    case OPC_ADDI: /* Arithmetic with immediate opcode */
        gen_arith_imm(env, ctx, op, rt, rs, imm);
    case OPC_SLTI: /* Set on less than with immediate opcode */
        gen_slt_imm(env, op, rt, rs, imm);
    case OPC_ANDI: /* Arithmetic with immediate opcode */
        gen_logic_imm(env, op, rt, rs, imm);
    case OPC_J ... OPC_JAL: /* Jump */
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, op, 4, rs, rt, offset);
    case OPC_BEQ ... OPC_BGTZ: /* Branch */
    case OPC_BEQL ... OPC_BGTZL:
        gen_compute_branch(ctx, op, 4, rs, rt, imm << 2);
    case OPC_LB ... OPC_LWR: /* Load and stores */
    case OPC_SB ... OPC_SW:
        gen_ldst(ctx, op, rt, rs, imm);
        gen_st_cond(ctx, op, rt, rs, imm);
        check_insn(env, ctx, ISA_MIPS3 | ISA_MIPS32);
        /* Treat as NOP. */
        check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
        /* Treat as NOP. */
    /* Floating point (COP1). */
        gen_cop1_ldst(env, ctx, op, rt, rs, imm);
        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            op1 = MASK_CP1(ctx->opcode);
                check_insn(env, ctx, ISA_MIPS32R2);
                gen_cp1(ctx, op1, rt, rd);
#if defined(TARGET_MIPS64)
                check_insn(env, ctx, ISA_MIPS3);
                gen_cp1(ctx, op1, rt, rd);
                check_insn(env, ctx, ASE_MIPS3D);
                gen_compute_branch1(env, ctx, MASK_BC1(ctx->opcode),
                                    (rt >> 2) & 0x7, imm << 2);
                gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
                generate_exception (ctx, EXCP_RI);
            generate_exception_err(ctx, EXCP_CpU, 1);
        /* COP2: Not implemented. */
        generate_exception_err(ctx, EXCP_CpU, 2);
        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            op1 = MASK_CP3(ctx->opcode);
                gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
                /* Treat as NOP. */
                gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
                generate_exception (ctx, EXCP_RI);
            generate_exception_err(ctx, EXCP_CpU, 1);
#if defined(TARGET_MIPS64)
    /* MIPS64 opcodes */
    case OPC_LDL ... OPC_LDR:
    case OPC_SDL ... OPC_SDR:
        check_insn(env, ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ldst(ctx, op, rt, rs, imm);
        check_insn(env, ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st_cond(ctx, op, rt, rs, imm);
        check_insn(env, ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith_imm(env, ctx, op, rt, rs, imm);
        check_insn(env, ctx, ASE_MIPS16 | ASE_MICROMIPS);
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, op, 4, rs, rt, offset);
        check_insn(env, ctx, ASE_MDMX);
        /* MDMX: Not implemented. */
    default:            /* Invalid */
        MIPS_INVAL("major opcode");
        generate_exception(ctx, EXCP_RI);
gen_intermediate_code_internal (CPUState *env, TranslationBlock *tb,

    target_ulong pc_start;
    uint16_t *gen_opc_end;

    qemu_log("search pc %d\n", search_pc);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;

    ctx.singlestep_enabled = env->singlestep_enabled;
    ctx.bstate = BS_NONE;
    /* Restore delay slot state from the tb context. */
    ctx.hflags = (uint32_t)tb->flags; /* FIXME: maybe use 64 bits here? */
    restore_cpu_state(env, &ctx);
#ifdef CONFIG_USER_ONLY
    ctx.mem_idx = MIPS_HFLAG_UM;
    ctx.mem_idx = ctx.hflags & MIPS_HFLAG_KSU;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb, ctx.mem_idx, ctx.hflags);
    gen_icount_start();
    while (ctx.bstate == BS_NONE) {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.pc) {
                    save_cpu_state(&ctx, 1);
                    ctx.bstate = BS_BRANCH;
                    gen_helper_0i(raise_exception, EXCP_DEBUG);
                    /* Include the breakpoint location or the tb won't
                     * be flushed when it must be.  */
                    goto done_generating;

            j = gen_opc_ptr - gen_opc_buf;
                gen_opc_instr_start[lj++] = 0;
            gen_opc_pc[lj] = ctx.pc;
            gen_opc_hflags[lj] = ctx.hflags & MIPS_HFLAG_BMASK;
            gen_opc_instr_start[lj] = 1;
            gen_opc_icount[lj] = num_insns;
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))

        if (!(ctx.hflags & MIPS_HFLAG_M16)) {
            ctx.opcode = ldl_code(ctx.pc);
            decode_opc(env, &ctx, &is_branch);
        } else if (env->insn_flags & ASE_MICROMIPS) {
            ctx.opcode = lduw_code(ctx.pc);
            insn_bytes = decode_micromips_opc(env, &ctx, &is_branch);
        } else if (env->insn_flags & ASE_MIPS16) {
            ctx.opcode = lduw_code(ctx.pc);
            insn_bytes = decode_mips16_opc(env, &ctx, &is_branch);
            generate_exception(&ctx, EXCP_RI);
            ctx.bstate = BS_STOP;
            handle_delay_slot(env, &ctx, insn_bytes);
        ctx.pc += insn_bytes;
        /* Execute a branch and its delay slot as a single instruction.
           This is what GDB expects and is consistent with what the
           hardware does (e.g. if a delay slot instruction faults, the
           reported PC is the PC of the branch). */
        if (env->singlestep_enabled && (ctx.hflags & MIPS_HFLAG_BMASK) == 0)

        if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)

        if (gen_opc_ptr >= gen_opc_end)

        if (num_insns >= max_insns)

    if (tb->cflags & CF_LAST_IO)

    if (env->singlestep_enabled && ctx.bstate != BS_BRANCH) {
        save_cpu_state(&ctx, ctx.bstate == BS_NONE);
        gen_helper_0i(raise_exception, EXCP_DEBUG);
        switch (ctx.bstate) {
            gen_helper_interrupt_restart();
            gen_goto_tb(&ctx, 0, ctx.pc);
            save_cpu_state(&ctx, 0);
            gen_goto_tb(&ctx, 0, ctx.pc);
            gen_helper_interrupt_restart();
            tcg_gen_exit_tb(0);

    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
        j = gen_opc_ptr - gen_opc_buf;
            gen_opc_instr_start[lj++] = 0;
        tb->size = ctx.pc - pc_start;
        tb->icount = num_insns;

    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, ctx.pc - pc_start, 0);
void gen_intermediate_code (CPUState *env, struct TranslationBlock *tb)

    gen_intermediate_code_internal(env, tb, 0);

void gen_intermediate_code_pc (CPUState *env, struct TranslationBlock *tb)

    gen_intermediate_code_internal(env, tb, 1);

static void fpu_dump_state(CPUState *env, FILE *f,
                           int (*fpu_fprintf)(FILE *f, const char *fmt, ...),

    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);

#define printfpr(fp)                                                    \
        fpu_fprintf(f, "w:%08x d:%016" PRIx64                           \
                    " fd:%13g fs:%13g psu: %13g\n",                     \
                    (fp)->w[FP_ENDIAN_IDX], (fp)->d,                    \
                    (double)(fp)->fd,                                   \
                    (double)(fp)->fs[FP_ENDIAN_IDX],                    \
                    (double)(fp)->fs[!FP_ENDIAN_IDX]);                  \
        tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX];                  \
        tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX];           \
        fpu_fprintf(f, "w:%08x d:%016" PRIx64                           \
                    " fd:%13g fs:%13g psu:%13g\n",                      \
                    tmp.w[FP_ENDIAN_IDX], tmp.d,                        \
                    (double)tmp.fs[FP_ENDIAN_IDX],                      \
                    (double)tmp.fs[!FP_ENDIAN_IDX]);                    \

    fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%08x(0x%02x)\n",
                env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                env->active_fpu.fp_status,
                get_float_exception_flags(&env->active_fpu.fp_status));
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        fpu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);
#if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
/* Debug help: The architecture requires 32bit code to maintain proper
   sign-extended values on 64bit machines. */

#define SIGN_EXT_P(val) ((((val) & ~0x7fffffff) == 0) || (((val) & ~0x7fffffff) == ~0x7fffffff))
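/* SIGN_EXT_P(val) is true when bits 63..31 of val are all equal, i.e. when
   val is a correctly sign-extended 32-bit value.  For example 0x7fffffff
   and 0xffffffff80000000 pass the check, while 0x0000000100000000 fails. */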
cpu_mips_check_sign_extensions (CPUState *env, FILE *f,
                                int (*cpu_fprintf)(FILE *f, const char *fmt, ...),

    if (!SIGN_EXT_P(env->active_tc.PC))
        cpu_fprintf(f, "BROKEN: pc=0x" TARGET_FMT_lx "\n", env->active_tc.PC);
    if (!SIGN_EXT_P(env->active_tc.HI[0]))
        cpu_fprintf(f, "BROKEN: HI=0x" TARGET_FMT_lx "\n", env->active_tc.HI[0]);
    if (!SIGN_EXT_P(env->active_tc.LO[0]))
        cpu_fprintf(f, "BROKEN: LO=0x" TARGET_FMT_lx "\n", env->active_tc.LO[0]);
    if (!SIGN_EXT_P(env->btarget))
        cpu_fprintf(f, "BROKEN: btarget=0x" TARGET_FMT_lx "\n", env->btarget);

    for (i = 0; i < 32; i++) {
        if (!SIGN_EXT_P(env->active_tc.gpr[i]))
            cpu_fprintf(f, "BROKEN: %s=0x" TARGET_FMT_lx "\n", regnames[i], env->active_tc.gpr[i]);

    if (!SIGN_EXT_P(env->CP0_EPC))
        cpu_fprintf(f, "BROKEN: EPC=0x" TARGET_FMT_lx "\n", env->CP0_EPC);
    if (!SIGN_EXT_P(env->lladdr))
        cpu_fprintf(f, "BROKEN: LLAddr=0x" TARGET_FMT_lx "\n", env->lladdr);
void cpu_dump_state (CPUState *env, FILE *f,
                     int (*cpu_fprintf)(FILE *f, const char *fmt, ...),

    cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                " LO=0x" TARGET_FMT_lx " ds %04x "
                TARGET_FMT_lx " " TARGET_FMT_ld "\n",
                env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                env->hflags, env->btarget, env->bcond);
    for (i = 0; i < 32; i++) {
            cpu_fprintf(f, "GPR%02d:", i);
        cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
            cpu_fprintf(f, "\n");

    cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
                env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    cpu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x" TARGET_FMT_lx "\n",
                env->CP0_Config0, env->CP0_Config1, env->lladdr);
    if (env->hflags & MIPS_HFLAG_FPU)
        fpu_dump_state(env, f, cpu_fprintf, flags);
#if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
    cpu_mips_check_sign_extensions(env, f, cpu_fprintf, flags);
static void mips_tcg_init(void)

    /* Initialize various static tables. */

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    TCGV_UNUSED(cpu_gpr[0]);
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
                                        offsetof(CPUState, active_tc.gpr[i]),
    cpu_PC = tcg_global_mem_new(TCG_AREG0,
                                offsetof(CPUState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(TCG_AREG0,
                                       offsetof(CPUState, active_tc.HI[i]),
        cpu_LO[i] = tcg_global_mem_new(TCG_AREG0,
                                       offsetof(CPUState, active_tc.LO[i]),
        cpu_ACX[i] = tcg_global_mem_new(TCG_AREG0,
                                        offsetof(CPUState, active_tc.ACX[i]),
    cpu_dspctrl = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUState, active_tc.DSPControl),
    bcond = tcg_global_mem_new(TCG_AREG0,
                               offsetof(CPUState, bcond), "bcond");
    btarget = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(TCG_AREG0,
                                    offsetof(CPUState, hflags), "hflags");

    fpu_fcr0 = tcg_global_mem_new_i32(TCG_AREG0,
                                      offsetof(CPUState, active_fpu.fcr0),
    fpu_fcr31 = tcg_global_mem_new_i32(TCG_AREG0,
                                       offsetof(CPUState, active_fpu.fcr31),

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"

#include "translate_init.c"
CPUMIPSState *cpu_mips_init (const char *cpu_model)

    const mips_def_t *def;

    def = cpu_mips_find_by_name(cpu_model);
    env = qemu_mallocz(sizeof(CPUMIPSState));
    env->cpu_model = def;
    env->cpu_model_str = cpu_model;

    cpu_exec_init(env);
#ifndef CONFIG_USER_ONLY
    mmu_init(env, def);
    fpu_init(env, def);
    mvp_init(env, def);

    qemu_init_vcpu(env);
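/* Typical call sequence (a hedged sketch of how cpu_mips_init() above and
   cpu_reset() below are used by board code elsewhere in QEMU; the model
   name and the error handling are illustrative assumptions, not taken
   from this file):

       CPUMIPSState *env = cpu_mips_init("24Kf");
       if (env == NULL) {
           (unrecognised cpu_model string - bail out, board-specific)
       }
       cpu_reset(env);
*/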
void cpu_reset (CPUMIPSState *env)

    if (qemu_loglevel_mask(CPU_LOG_RESET)) {
        qemu_log("CPU Reset (CPU %d)\n", env->cpu_index);
        log_cpu_state(env, 0);

    memset(env, 0, offsetof(CPUMIPSState, breakpoints));

    /* Reset registers to their default values */
    env->CP0_PRid = env->cpu_model->CP0_PRid;
    env->CP0_Config0 = env->cpu_model->CP0_Config0;
#ifdef TARGET_WORDS_BIGENDIAN
    env->CP0_Config0 |= (1 << CP0C0_BE);
    env->CP0_Config1 = env->cpu_model->CP0_Config1;
    env->CP0_Config2 = env->cpu_model->CP0_Config2;
    env->CP0_Config3 = env->cpu_model->CP0_Config3;
    env->CP0_Config6 = env->cpu_model->CP0_Config6;
    env->CP0_Config7 = env->cpu_model->CP0_Config7;
    env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
                                 << env->cpu_model->CP0_LLAddr_shift;
    env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
    env->SYNCI_Step = env->cpu_model->SYNCI_Step;
    env->CCRes = env->cpu_model->CCRes;
    env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
    env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
    env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
    env->current_tc = 0;
    env->SEGBITS = env->cpu_model->SEGBITS;
    env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        env->SEGMask |= 3ULL << 62;
    env->PABITS = env->cpu_model->PABITS;
    env->PAMask = (target_ulong)((1ULL << env->cpu_model->PABITS) - 1);
    env->CP0_SRSConf0_rw_bitmask = env->cpu_model->CP0_SRSConf0_rw_bitmask;
    env->CP0_SRSConf0 = env->cpu_model->CP0_SRSConf0;
    env->CP0_SRSConf1_rw_bitmask = env->cpu_model->CP0_SRSConf1_rw_bitmask;
    env->CP0_SRSConf1 = env->cpu_model->CP0_SRSConf1;
    env->CP0_SRSConf2_rw_bitmask = env->cpu_model->CP0_SRSConf2_rw_bitmask;
    env->CP0_SRSConf2 = env->cpu_model->CP0_SRSConf2;
    env->CP0_SRSConf3_rw_bitmask = env->cpu_model->CP0_SRSConf3_rw_bitmask;
    env->CP0_SRSConf3 = env->cpu_model->CP0_SRSConf3;
    env->CP0_SRSConf4_rw_bitmask = env->cpu_model->CP0_SRSConf4_rw_bitmask;
    env->CP0_SRSConf4 = env->cpu_model->CP0_SRSConf4;
    env->insn_flags = env->cpu_model->insn_flags;

#if defined(CONFIG_USER_ONLY)
    env->hflags = MIPS_HFLAG_UM;
    /* Enable access to the SYNCI_Step register. */
    env->CP0_HWREna |= (1 << 1);
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        env->hflags |= MIPS_HFLAG_FPU;
#ifdef TARGET_MIPS64
    if (env->active_fpu.fcr0 & (1 << FCR0_F64)) {
        env->hflags |= MIPS_HFLAG_F64;
    if (env->hflags & MIPS_HFLAG_BMASK) {
        /* If the exception was raised from a delay slot,
           come back to the jump. */
        env->CP0_ErrorEPC = env->active_tc.PC - 4;
        env->CP0_ErrorEPC = env->active_tc.PC;
    env->active_tc.PC = (int32_t)0xBFC00000;
    env->CP0_Random = env->tlb->nb_tlb - 1;
    env->tlb->tlb_in_use = env->tlb->nb_tlb;
    env->CP0_Wired = 0;
    /* SMP not implemented */
    env->CP0_EBase = 0x80000000;
    env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
    /* vectored interrupts not implemented, timer on int 7,
       no performance counters. */
    env->CP0_IntCtl = 0xe0000000;
    for (i = 0; i < 7; i++) {
        env->CP0_WatchLo[i] = 0;
        env->CP0_WatchHi[i] = 0x80000000;
    env->CP0_WatchLo[7] = 0;
    env->CP0_WatchHi[7] = 0;
    /* Count register increments in debug mode, EJTAG version 1 */
    env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);
    env->hflags = MIPS_HFLAG_CP0;

#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        env->hflags |= MIPS_HFLAG_64;
    env->exception_index = EXCP_NONE;
void gen_pc_load(CPUState *env, TranslationBlock *tb,
                 unsigned long searched_pc, int pc_pos, void *puc)

    env->active_tc.PC = gen_opc_pc[pc_pos];
    env->hflags &= ~MIPS_HFLAG_BMASK;
    env->hflags |= gen_opc_hflags[pc_pos];