 *  MIPS32 emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2004-2005 Jocelyn Mayer
 *  Copyright (c) 2006 Marius Groeger (FPU operations)
 *  Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 *  Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
//#define MIPS_DEBUG_DISAS
//#define MIPS_DEBUG_SIGN_EXTENSIONS

/* MIPS major opcodes */
#define MASK_OP_MAJOR(op)  (op & (0x3F << 26))
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP1      = (0x11 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_JAL      = (0x03 << 26),
    OPC_JALS     = OPC_JAL | 0x5,
    OPC_BEQ      = (0x04 << 26),  /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),  /* MIPS 16 only */
    OPC_JALXS    = OPC_JALX | 0x5,
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* Reserved major opcode */
    OPC_MAJOR3B_RESERVED = (0x3B << 26),
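/*
 * Illustrative decode example (added, not in the original sources): for the
 * instruction word 0x8C430004 (lw v1, 4(v0)), MASK_OP_MAJOR extracts
 * 0x8C430004 & (0x3F << 26) = 0x8C000000 = (0x23 << 26) = OPC_LW, the value
 * the major-opcode decode switches on before falling through to the
 * SPECIAL/REGIMM/COP sub-tables below.
 */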
/* MIPS special opcodes */
#define MASK_SPECIAL(op)   MASK_OP_MAJOR(op) | (op & 0x3F)

    OPC_SLL      = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0   */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3   */
    OPC_SRL      = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_ROTR     = OPC_SRL | (1 << 21),
    OPC_SRA      = 0x03 | OPC_SPECIAL,
    OPC_SLLV     = 0x04 | OPC_SPECIAL,
    OPC_SRLV     = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_ROTRV    = OPC_SRLV | (1 << 6),
    OPC_SRAV     = 0x07 | OPC_SPECIAL,
    OPC_DSLLV    = 0x14 | OPC_SPECIAL,
    OPC_DSRLV    = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DROTRV   = OPC_DSRLV | (1 << 6),
    OPC_DSRAV    = 0x17 | OPC_SPECIAL,
    OPC_DSLL     = 0x38 | OPC_SPECIAL,
    OPC_DSRL     = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DROTR    = OPC_DSRL | (1 << 21),
    OPC_DSRA     = 0x3B | OPC_SPECIAL,
    OPC_DSLL32   = 0x3C | OPC_SPECIAL,
    OPC_DSRL32   = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DROTR32  = OPC_DSRL32 | (1 << 21),
    OPC_DSRA32   = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT     = 0x18 | OPC_SPECIAL,
    OPC_MULTU    = 0x19 | OPC_SPECIAL,
    OPC_DIV      = 0x1A | OPC_SPECIAL,
    OPC_DIVU     = 0x1B | OPC_SPECIAL,
    OPC_DMULT    = 0x1C | OPC_SPECIAL,
    OPC_DMULTU   = 0x1D | OPC_SPECIAL,
    OPC_DDIV     = 0x1E | OPC_SPECIAL,
    OPC_DDIVU    = 0x1F | OPC_SPECIAL,
    /* 2 registers arithmetic / logic */
    OPC_ADD      = 0x20 | OPC_SPECIAL,
    OPC_ADDU     = 0x21 | OPC_SPECIAL,
    OPC_SUB      = 0x22 | OPC_SPECIAL,
    OPC_SUBU     = 0x23 | OPC_SPECIAL,
    OPC_AND      = 0x24 | OPC_SPECIAL,
    OPC_OR       = 0x25 | OPC_SPECIAL,
    OPC_XOR      = 0x26 | OPC_SPECIAL,
    OPC_NOR      = 0x27 | OPC_SPECIAL,
    OPC_SLT      = 0x2A | OPC_SPECIAL,
    OPC_SLTU     = 0x2B | OPC_SPECIAL,
    OPC_DADD     = 0x2C | OPC_SPECIAL,
    OPC_DADDU    = 0x2D | OPC_SPECIAL,
    OPC_DSUB     = 0x2E | OPC_SPECIAL,
    OPC_DSUBU    = 0x2F | OPC_SPECIAL,
    OPC_JR       = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR     = 0x09 | OPC_SPECIAL, /* Also JALR.HB */
    OPC_JALRC    = OPC_JALR | (0x5 << 6),
    OPC_JALRS    = 0x10 | OPC_SPECIAL | (0x5 << 6),
    OPC_TGE      = 0x30 | OPC_SPECIAL,
    OPC_TGEU     = 0x31 | OPC_SPECIAL,
    OPC_TLT      = 0x32 | OPC_SPECIAL,
    OPC_TLTU     = 0x33 | OPC_SPECIAL,
    OPC_TEQ      = 0x34 | OPC_SPECIAL,
    OPC_TNE      = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI     = 0x10 | OPC_SPECIAL,
    OPC_MTHI     = 0x11 | OPC_SPECIAL,
    OPC_MFLO     = 0x12 | OPC_SPECIAL,
    OPC_MTLO     = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ     = 0x0A | OPC_SPECIAL,
    OPC_MOVN     = 0x0B | OPC_SPECIAL,
    OPC_MOVCI    = 0x01 | OPC_SPECIAL,
    OPC_PMON     = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL  = 0x0C | OPC_SPECIAL,
    OPC_BREAK    = 0x0D | OPC_SPECIAL,
    OPC_SPIM     = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC     = 0x0F | OPC_SPECIAL,
    OPC_SPECIAL15_RESERVED = 0x15 | OPC_SPECIAL,
    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL35_RESERVED = 0x35 | OPC_SPECIAL,
    OPC_SPECIAL37_RESERVED = 0x37 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op)   MASK_SPECIAL(op) | (op & (0x1F << 6))

    OPC_VR54XX_MULS    = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU   = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC    = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU   = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC    = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU   = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI   = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU  = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI  = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI  = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI  = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op)    MASK_OP_MAJOR(op) | (op & (0x1F << 16))

    OPC_BLTZ     = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL    = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ     = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL    = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL   = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALS  = OPC_BLTZAL | 0x5, /* microMIPS */
    OPC_BLTZALL  = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL   = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALS  = OPC_BGEZAL | 0x5, /* microMIPS */
    OPC_BGEZALL  = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI     = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU    = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI     = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU    = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI     = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI     = (0x0E << 16) | OPC_REGIMM,
    OPC_SYNCI    = (0x1F << 16) | OPC_REGIMM,
/* Special2 opcodes */
#define MASK_SPECIAL2(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

    /* Multiply & xxx operations */
    OPC_MADD     = 0x00 | OPC_SPECIAL2,
    OPC_MADDU    = 0x01 | OPC_SPECIAL2,
    OPC_MUL      = 0x02 | OPC_SPECIAL2,
    OPC_MSUB     = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU    = 0x05 | OPC_SPECIAL2,
    OPC_MULT_G_2F   = 0x10 | OPC_SPECIAL2,
    OPC_DMULT_G_2F  = 0x11 | OPC_SPECIAL2,
    OPC_MULTU_G_2F  = 0x12 | OPC_SPECIAL2,
    OPC_DMULTU_G_2F = 0x13 | OPC_SPECIAL2,
    OPC_DIV_G_2F    = 0x14 | OPC_SPECIAL2,
    OPC_DDIV_G_2F   = 0x15 | OPC_SPECIAL2,
    OPC_DIVU_G_2F   = 0x16 | OPC_SPECIAL2,
    OPC_DDIVU_G_2F  = 0x17 | OPC_SPECIAL2,
    OPC_MOD_G_2F    = 0x1c | OPC_SPECIAL2,
    OPC_DMOD_G_2F   = 0x1d | OPC_SPECIAL2,
    OPC_MODU_G_2F   = 0x1e | OPC_SPECIAL2,
    OPC_DMODU_G_2F  = 0x1f | OPC_SPECIAL2,
    OPC_CLZ      = 0x20 | OPC_SPECIAL2,
    OPC_CLO      = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ     = 0x24 | OPC_SPECIAL2,
    OPC_DCLO     = 0x25 | OPC_SPECIAL2,
    OPC_SDBBP    = 0x3F | OPC_SPECIAL2,
/* Special3 opcodes */
#define MASK_SPECIAL3(op)  MASK_OP_MAJOR(op) | (op & 0x3F)

    OPC_EXT      = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM    = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU    = 0x02 | OPC_SPECIAL3,
    OPC_DEXT     = 0x03 | OPC_SPECIAL3,
    OPC_INS      = 0x04 | OPC_SPECIAL3,
    OPC_DINSM    = 0x05 | OPC_SPECIAL3,
    OPC_DINSU    = 0x06 | OPC_SPECIAL3,
    OPC_DINS     = 0x07 | OPC_SPECIAL3,
    OPC_FORK     = 0x08 | OPC_SPECIAL3,
    OPC_YIELD    = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL    = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL   = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR    = 0x3B | OPC_SPECIAL3,
    OPC_MULT_G_2E   = 0x18 | OPC_SPECIAL3,
    OPC_MULTU_G_2E  = 0x19 | OPC_SPECIAL3,
    OPC_DIV_G_2E    = 0x1A | OPC_SPECIAL3,
    OPC_DIVU_G_2E   = 0x1B | OPC_SPECIAL3,
    OPC_DMULT_G_2E  = 0x1C | OPC_SPECIAL3,
    OPC_DMULTU_G_2E = 0x1D | OPC_SPECIAL3,
    OPC_DDIV_G_2E   = 0x1E | OPC_SPECIAL3,
    OPC_DDIVU_G_2E  = 0x1F | OPC_SPECIAL3,
    OPC_MOD_G_2E    = 0x22 | OPC_SPECIAL3,
    OPC_MODU_G_2E   = 0x23 | OPC_SPECIAL3,
    OPC_DMOD_G_2E   = 0x26 | OPC_SPECIAL3,
    OPC_DMODU_G_2E  = 0x27 | OPC_SPECIAL3,
#define MASK_BSHFL(op)     MASK_SPECIAL3(op) | (op & (0x1F << 6))

    OPC_WSBH     = (0x02 << 6) | OPC_BSHFL,
    OPC_SEB      = (0x10 << 6) | OPC_BSHFL,
    OPC_SEH      = (0x18 << 6) | OPC_BSHFL,

#define MASK_DBSHFL(op)    MASK_SPECIAL3(op) | (op & (0x1F << 6))

    OPC_DSBH     = (0x02 << 6) | OPC_DBSHFL,
    OPC_DSHD     = (0x05 << 6) | OPC_DBSHFL,
/* Coprocessor 0 (rs field) */
#define MASK_CP0(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

    OPC_MFC0     = (0x00 << 21) | OPC_CP0,
    OPC_DMFC0    = (0x01 << 21) | OPC_CP0,
    OPC_MTC0     = (0x04 << 21) | OPC_CP0,
    OPC_DMTC0    = (0x05 << 21) | OPC_CP0,
    OPC_MFTR     = (0x08 << 21) | OPC_CP0,
    OPC_RDPGPR   = (0x0A << 21) | OPC_CP0,
    OPC_MFMC0    = (0x0B << 21) | OPC_CP0,
    OPC_MTTR     = (0x0C << 21) | OPC_CP0,
    OPC_WRPGPR   = (0x0E << 21) | OPC_CP0,
    OPC_C0       = (0x10 << 21) | OPC_CP0,
    OPC_C0_FIRST = (0x10 << 21) | OPC_CP0,
    OPC_C0_LAST  = (0x1F << 21) | OPC_CP0,

#define MASK_MFMC0(op)     MASK_CP0(op) | (op & 0xFFFF)

    OPC_DMT      = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_EMT      = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_DVPE     = 0x01 | (0 << 5) | OPC_MFMC0,
    OPC_EVPE     = 0x01 | (1 << 5) | OPC_MFMC0,
    OPC_DI       = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_EI       = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
/* Coprocessor 0 (with rs == C0) */
#define MASK_C0(op)        MASK_CP0(op) | (op & 0x3F)

    OPC_TLBR     = 0x01 | OPC_C0,
    OPC_TLBWI    = 0x02 | OPC_C0,
    OPC_TLBWR    = 0x06 | OPC_C0,
    OPC_TLBP     = 0x08 | OPC_C0,
    OPC_RFE      = 0x10 | OPC_C0,
    OPC_ERET     = 0x18 | OPC_C0,
    OPC_DERET    = 0x1F | OPC_C0,
    OPC_WAIT     = 0x20 | OPC_C0,
/* Coprocessor 1 (rs field) */
#define MASK_CP1(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

/* Values for the fmt field in FP instructions */
    /* 0 - 15 are reserved */
    FMT_S  = 16,  /* single fp */
    FMT_D  = 17,  /* double fp */
    FMT_E  = 18,  /* extended fp */
    FMT_Q  = 19,  /* quad fp */
    FMT_W  = 20,  /* 32-bit fixed */
    FMT_L  = 21,  /* 64-bit fixed */
    FMT_PS = 22,  /* paired single fp */
    /* 23 - 31 are reserved */

    OPC_MFC1     = (0x00 << 21) | OPC_CP1,
    OPC_DMFC1    = (0x01 << 21) | OPC_CP1,
    OPC_CFC1     = (0x02 << 21) | OPC_CP1,
    OPC_MFHC1    = (0x03 << 21) | OPC_CP1,
    OPC_MTC1     = (0x04 << 21) | OPC_CP1,
    OPC_DMTC1    = (0x05 << 21) | OPC_CP1,
    OPC_CTC1     = (0x06 << 21) | OPC_CP1,
    OPC_MTHC1    = (0x07 << 21) | OPC_CP1,
    OPC_BC1      = (0x08 << 21) | OPC_CP1, /* bc */
    OPC_BC1ANY2  = (0x09 << 21) | OPC_CP1,
    OPC_BC1ANY4  = (0x0A << 21) | OPC_CP1,
    OPC_S_FMT    = (FMT_S << 21) | OPC_CP1,
    OPC_D_FMT    = (FMT_D << 21) | OPC_CP1,
    OPC_E_FMT    = (FMT_E << 21) | OPC_CP1,
    OPC_Q_FMT    = (FMT_Q << 21) | OPC_CP1,
    OPC_W_FMT    = (FMT_W << 21) | OPC_CP1,
    OPC_L_FMT    = (FMT_L << 21) | OPC_CP1,
    OPC_PS_FMT   = (FMT_PS << 21) | OPC_CP1,
#define MASK_CP1_FUNC(op)       MASK_CP1(op) | (op & 0x3F)
#define MASK_BC1(op)            MASK_CP1(op) | (op & (0x3 << 16))

    OPC_BC1F     = (0x00 << 16) | OPC_BC1,
    OPC_BC1T     = (0x01 << 16) | OPC_BC1,
    OPC_BC1FL    = (0x02 << 16) | OPC_BC1,
    OPC_BC1TL    = (0x03 << 16) | OPC_BC1,

    OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
    OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,

    OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
    OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
#define MASK_CP2(op)       MASK_OP_MAJOR(op) | (op & (0x1F << 21))

    OPC_MFC2     = (0x00 << 21) | OPC_CP2,
    OPC_DMFC2    = (0x01 << 21) | OPC_CP2,
    OPC_CFC2     = (0x02 << 21) | OPC_CP2,
    OPC_MFHC2    = (0x03 << 21) | OPC_CP2,
    OPC_MTC2     = (0x04 << 21) | OPC_CP2,
    OPC_DMTC2    = (0x05 << 21) | OPC_CP2,
    OPC_CTC2     = (0x06 << 21) | OPC_CP2,
    OPC_MTHC2    = (0x07 << 21) | OPC_CP2,
    OPC_BC2      = (0x08 << 21) | OPC_CP2,
#define MASK_CP3(op)       MASK_OP_MAJOR(op) | (op & 0x3F)

    OPC_LWXC1    = 0x00 | OPC_CP3,
    OPC_LDXC1    = 0x01 | OPC_CP3,
    OPC_LUXC1    = 0x05 | OPC_CP3,
    OPC_SWXC1    = 0x08 | OPC_CP3,
    OPC_SDXC1    = 0x09 | OPC_CP3,
    OPC_SUXC1    = 0x0D | OPC_CP3,
    OPC_PREFX    = 0x0F | OPC_CP3,
    OPC_ALNV_PS  = 0x1E | OPC_CP3,
    OPC_MADD_S   = 0x20 | OPC_CP3,
    OPC_MADD_D   = 0x21 | OPC_CP3,
    OPC_MADD_PS  = 0x26 | OPC_CP3,
    OPC_MSUB_S   = 0x28 | OPC_CP3,
    OPC_MSUB_D   = 0x29 | OPC_CP3,
    OPC_MSUB_PS  = 0x2E | OPC_CP3,
    OPC_NMADD_S  = 0x30 | OPC_CP3,
    OPC_NMADD_D  = 0x31 | OPC_CP3,
    OPC_NMADD_PS = 0x36 | OPC_CP3,
    OPC_NMSUB_S  = 0x38 | OPC_CP3,
    OPC_NMSUB_D  = 0x39 | OPC_CP3,
    OPC_NMSUB_PS = 0x3E | OPC_CP3,
/* global register indices */
static TCGv_ptr cpu_env;
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC], cpu_ACX[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget, bcond;
static TCGv_i32 hflags;
static TCGv_i32 fpu_fcr0, fpu_fcr31;

static uint32_t gen_opc_hflags[OPC_BUF_SIZE];

#include "gen-icount.h"
#define gen_helper_0i(name, arg) do {                      \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);              \
    gen_helper_##name(helper_tmp);                         \
    tcg_temp_free_i32(helper_tmp);                         \
    } while(0)

#define gen_helper_1i(name, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);             \
    gen_helper_##name(arg1, helper_tmp);                   \
    tcg_temp_free_i32(helper_tmp);                         \
    } while(0)

#define gen_helper_2i(name, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);             \
    gen_helper_##name(arg1, arg2, helper_tmp);             \
    tcg_temp_free_i32(helper_tmp);                         \
    } while(0)

#define gen_helper_3i(name, arg1, arg2, arg3, arg4) do {   \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);             \
    gen_helper_##name(arg1, arg2, arg3, helper_tmp);       \
    tcg_temp_free_i32(helper_tmp);                         \
    } while(0)
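/*
 * Usage sketch (added for illustration): the gen_helper_*i wrappers let a
 * caller pass a plain C immediate to a TCG helper that expects a TCGv_i32.
 * For example,
 *
 *     gen_helper_0i(raise_exception, excp);
 *
 * allocates a constant i32 temporary holding excp, calls
 * gen_helper_raise_exception() on it, and frees the temporary again.
 */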
typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc, saved_pc;
    int singlestep_enabled;
    /* Routine used to access memory */
    uint32_t hflags, saved_hflags;
    target_ulong btarget;

    BS_NONE   = 0, /* We go out of the TB without reaching a branch or an
                    * exception condition */
    BS_STOP   = 1, /* We want to stop translation for any reason */
    BS_BRANCH = 2, /* We reached a branch condition */
    BS_EXCP   = 3, /* We reached an exception condition */
static const char *regnames[] =
    { "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
      "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
      "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
      "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra", };

static const char *regnames_HI[] =
    { "HI0", "HI1", "HI2", "HI3", };

static const char *regnames_LO[] =
    { "LO0", "LO1", "LO2", "LO3", };

static const char *regnames_ACX[] =
    { "ACX0", "ACX1", "ACX2", "ACX3", };

static const char *fregnames[] =
    { "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
      "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
      "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
      "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31", };
#ifdef MIPS_DEBUG_DISAS
#define MIPS_DEBUG(fmt, ...)                                      \
        qemu_log_mask(CPU_LOG_TB_IN_ASM,                          \
                      TARGET_FMT_lx ": %08x " fmt "\n",           \
                      ctx->pc, ctx->opcode , ## __VA_ARGS__)
#define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
#define MIPS_DEBUG(fmt, ...) do { } while(0)
#define LOG_DISAS(...) do { } while (0)
#endif

#define MIPS_INVAL(op)                                                        \
        MIPS_DEBUG("Invalid %s %03x %03x %03x", op, ctx->opcode >> 26,       \
                   ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F));
/* General purpose registers moves. */
static inline void gen_load_gpr (TCGv t, int reg)
{
    if (reg == 0)
        tcg_gen_movi_tl(t, 0);
    else
        tcg_gen_mov_tl(t, cpu_gpr[reg]);
}

static inline void gen_store_gpr (TCGv t, int reg)
{
    if (reg != 0)
        tcg_gen_mov_tl(cpu_gpr[reg], t);
}
/* Moves to/from ACX register. */
static inline void gen_load_ACX (TCGv t, int reg)
{
    tcg_gen_mov_tl(t, cpu_ACX[reg]);
}

static inline void gen_store_ACX (TCGv t, int reg)
{
    tcg_gen_mov_tl(cpu_ACX[reg], t);
}
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
{
    TCGv t0 = tcg_temp_new();

        tcg_gen_movi_tl(t0, 0);

        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);

    gen_store_gpr(t0, to);
static inline void gen_store_srsgpr (int from, int to)
{
        TCGv t0 = tcg_temp_new();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(t0, from);
        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
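/*
 * Note (added): both shadow register set accessors above compute the same
 * base address: CP0_SRSCtl.PSS (the previous shadow set number) is
 * extracted, multiplied by the size of one 32-register set, and added to
 * cpu_env, so the selected set can then be indexed like an ordinary
 * register file entry.
 */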
/* Floating point register moves. */
static inline void gen_load_fpr32 (TCGv_i32 t, int reg)
{
    tcg_gen_ld_i32(t, cpu_env, offsetof(CPUMIPSState, active_fpu.fpr[reg].w[FP_ENDIAN_IDX]));
}

static inline void gen_store_fpr32 (TCGv_i32 t, int reg)
{
    tcg_gen_st_i32(t, cpu_env, offsetof(CPUMIPSState, active_fpu.fpr[reg].w[FP_ENDIAN_IDX]));
}

static inline void gen_load_fpr32h (TCGv_i32 t, int reg)
{
    tcg_gen_ld_i32(t, cpu_env, offsetof(CPUMIPSState, active_fpu.fpr[reg].w[!FP_ENDIAN_IDX]));
}

static inline void gen_store_fpr32h (TCGv_i32 t, int reg)
{
    tcg_gen_st_i32(t, cpu_env, offsetof(CPUMIPSState, active_fpu.fpr[reg].w[!FP_ENDIAN_IDX]));
}
static inline void gen_load_fpr64 (DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_ld_i64(t, cpu_env, offsetof(CPUMIPSState, active_fpu.fpr[reg].d));
    } else {
        TCGv_i32 t0 = tcg_temp_new_i32();
        TCGv_i32 t1 = tcg_temp_new_i32();
        gen_load_fpr32(t0, reg & ~1);
        gen_load_fpr32(t1, reg | 1);
        tcg_gen_concat_i32_i64(t, t0, t1);
        tcg_temp_free_i32(t0);
        tcg_temp_free_i32(t1);
    }
}
static inline void gen_store_fpr64 (DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_st_i64(t, cpu_env, offsetof(CPUMIPSState, active_fpu.fpr[reg].d));
    } else {
        TCGv_i64 t0 = tcg_temp_new_i64();
        TCGv_i32 t1 = tcg_temp_new_i32();
        tcg_gen_trunc_i64_i32(t1, t);
        gen_store_fpr32(t1, reg & ~1);
        tcg_gen_shri_i64(t0, t, 32);
        tcg_gen_trunc_i64_i32(t1, t0);
        gen_store_fpr32(t1, reg | 1);
        tcg_temp_free_i32(t1);
        tcg_temp_free_i64(t0);
    }
}
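/*
 * Note (added): when the CPU is not in 64-bit FPU mode (MIPS_HFLAG_F64
 * clear), a 64-bit FPR is modelled as an even/odd pair of 32-bit
 * registers: the low word lives in reg & ~1 and the high word in reg | 1,
 * which is why gen_load_fpr64()/gen_store_fpr64() fall back to two 32-bit
 * accesses in that case.
 */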
static inline int get_fp_bit (int cc)

static inline void gen_save_pc(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_PC, pc);
}
static inline void save_cpu_state (DisasContext *ctx, int do_save_pc)
{
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->pc != ctx->saved_pc) {
        gen_save_pc(ctx->pc);
        ctx->saved_pc = ctx->pc;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        tcg_gen_movi_i32(hflags, ctx->hflags);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
            tcg_gen_movi_tl(btarget, ctx->btarget);
static inline void restore_cpu_state (CPUMIPSState *env, DisasContext *ctx)
{
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
        ctx->btarget = env->btarget;
generate_exception_err (DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
}
generate_exception (DisasContext *ctx, int excp)
{
    save_cpu_state(ctx, 1);
    gen_helper_0i(raise_exception, excp);
}
/* Addresses computation */
static inline void gen_op_addr_add (DisasContext *ctx, TCGv ret, TCGv arg0, TCGv arg1)
{
    tcg_gen_add_tl(ret, arg0, arg1);

#if defined(TARGET_MIPS64)
    /* For compatibility with 32-bit code, data references in user mode
       with Status_UX = 0 should be cast to 32-bit and sign extended.
       See the MIPS64 PRA manual, section 4.10. */
    if (((ctx->hflags & MIPS_HFLAG_KSU) == MIPS_HFLAG_UM) &&
        !(ctx->hflags & MIPS_HFLAG_UX)) {
        tcg_gen_ext32s_i64(ret, ret);
    }
#endif
}
static inline void check_cp0_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
        generate_exception_err(ctx, EXCP_CpU, 0);
}

static inline void check_cp1_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
        generate_exception_err(ctx, EXCP_CpU, 1);
}
/* Verify that the processor is running with COP1X instructions enabled.
   This is associated with the nabla symbol in the MIPS32 and MIPS64
   opcode tables.  */
static inline void check_cop1x(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
        generate_exception(ctx, EXCP_RI);
}
/* Verify that the processor is running with 64-bit floating-point
   operations enabled. */
static inline void check_cp1_64bitmode(DisasContext *ctx)
{
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
        generate_exception(ctx, EXCP_RI);
}
/*
 * Verify if floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
 */
static inline void check_cp1_registers(DisasContext *ctx, int regs)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
        generate_exception(ctx, EXCP_RI);
}
/* This code generates a "reserved instruction" exception if the
   CPU does not support the instruction set corresponding to flags. */
static inline void check_insn(CPUMIPSState *env, DisasContext *ctx, int flags)
{
    if (unlikely(!(env->insn_flags & flags)))
        generate_exception(ctx, EXCP_RI);
}

/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception(ctx, EXCP_RI);
}
/* Define small wrappers for gen_load_fpr* so that we have a uniform
   calling interface for 32 and 64-bit FPRs.  No sense in changing
   all callers for gen_load_fpr32 when we need the CTX parameter for
   this one use. */
#define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(x, y)
#define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
#define FOP_CONDS(type, abs, fmt, ifmt, bits)                                 \
static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n,      \
                                               int ft, int fs, int cc)        \
{                                                                             \
    TCGv_i##bits fp0 = tcg_temp_new_i##bits ();                               \
    TCGv_i##bits fp1 = tcg_temp_new_i##bits ();                               \
        check_cp1_64bitmode(ctx);                                             \
        check_cp1_registers(ctx, fs | ft);                                    \
    gen_ldcmp_fpr##bits (ctx, fp0, fs);                                       \
    gen_ldcmp_fpr##bits (ctx, fp1, ft);                                       \
    case  0: gen_helper_2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc);    break; \
    case  1: gen_helper_2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc);   break; \
    case  2: gen_helper_2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc);   break; \
    case  3: gen_helper_2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc);  break; \
    case  4: gen_helper_2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc);  break; \
    case  5: gen_helper_2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc);  break; \
    case  6: gen_helper_2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc);  break; \
    case  7: gen_helper_2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc);  break; \
    case  8: gen_helper_2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc);   break; \
    case  9: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break; \
    case 10: gen_helper_2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc);  break; \
    case 11: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc);  break; \
    case 12: gen_helper_2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc);   break; \
    case 13: gen_helper_2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc);  break; \
    case 14: gen_helper_2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc);   break; \
    case 15: gen_helper_2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc);  break; \
    tcg_temp_free_i##bits (fp0);                                              \
    tcg_temp_free_i##bits (fp1);                                              \
}

FOP_CONDS(, 0, d, FMT_D, 64)
FOP_CONDS(abs, 1, d, FMT_D, 64)
FOP_CONDS(, 0, s, FMT_S, 32)
FOP_CONDS(abs, 1, s, FMT_S, 32)
FOP_CONDS(, 0, ps, FMT_PS, 64)
FOP_CONDS(abs, 1, ps, FMT_PS, 64)

#undef gen_ldcmp_fpr32
#undef gen_ldcmp_fpr64
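/*
 * Note (added): each FOP_CONDS() instantiation above expands into one
 * comparison generator such as gen_cmp_d(), gen_cmpabs_s() or gen_cmp_ps();
 * the "n" argument selects one of the 16 c.cond.fmt condition codes
 * (f, un, eq, ueq, olt, ult, ole, ule, sf, ngle, seq, ngl, lt, nge, le, ngt).
 * A caller in the FPU decode path would invoke, for instance,
 * gen_cmp_s(ctx, func & 0xf, ft, fs, cc) — the exact call site is not part
 * of this excerpt.
 */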
/* load/store instructions. */
#define OP_LD(insn,fname)                                                  \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)   \
{                                                                          \
    tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx);                         \
}
#if defined(TARGET_MIPS64)

#define OP_ST(insn,fname)                                                  \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, DisasContext *ctx)  \
{                                                                          \
    tcg_gen_qemu_##fname(arg1, arg2, ctx->mem_idx);                        \
}
#if defined(TARGET_MIPS64)
#ifdef CONFIG_USER_ONLY
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)   \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    tcg_gen_mov_tl(t0, arg1);                                              \
    tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx);                         \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));            \
    tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval));            \
}
#else
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)   \
{                                                                          \
    gen_helper_2i(insn, ret, arg1, ctx->mem_idx);                          \
}
#endif
OP_LD_ATOMIC(ll,ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld,ld64);
#endif
#ifdef CONFIG_USER_ONLY
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                                  \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
    TCGv t0 = tcg_temp_new();                                                   \
    int l1 = gen_new_label();                                                   \
    int l2 = gen_new_label();                                                   \
    tcg_gen_andi_tl(t0, arg2, almask);                                          \
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);                                 \
    tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));         \
    generate_exception(ctx, EXCP_AdES);                                         \
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));                 \
    tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2);                               \
    tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20));                           \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg));                  \
    tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval));             \
    gen_helper_0i(raise_exception, EXCP_SC);                                    \
    tcg_gen_movi_tl(t0, 0);                                                     \
    gen_store_gpr(t0, rt);                                                      \
#else
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                                  \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                               \
    TCGv t0 = tcg_temp_new();                                                   \
    gen_helper_3i(insn, t0, arg1, arg2, ctx->mem_idx);                          \
    gen_store_gpr(t0, rt);                                                      \
}
#endif

OP_ST_ATOMIC(sc,st32,ld32s,0x3);
#if defined(TARGET_MIPS64)
OP_ST_ATOMIC(scd,st64,ld64,0x7);
#endif
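/*
 * Note (added): in the CONFIG_USER_ONLY build, SC/SCD cannot be completed
 * inline.  After the alignment check, the generated code records the
 * candidate value (llnewval) and the destination register number plus a
 * width flag (llreg), then raises EXCP_SC so the usermode emulation can
 * re-check the reservation recorded by the matching LL (lladdr/llval) and
 * finish the store; a mismatching address simply writes 0 to rt.  The
 * system-mode build instead calls the sc/scd helpers with the current
 * mem_idx.
 */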
static void gen_base_offset_addr (DisasContext *ctx, TCGv addr,
                                  int base, int16_t offset)
{
    if (base == 0) {
        tcg_gen_movi_tl(addr, offset);
    } else if (offset == 0) {
        gen_load_gpr(addr, base);
    } else {
        tcg_gen_movi_tl(addr, offset);
        gen_op_addr_add(ctx, addr, cpu_gpr[base], addr);
    }
}
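/*
 * Usage note (added): gen_base_offset_addr() leaves the effective address
 * GPR[base] + sign_extend(offset) in "addr"; gen_op_addr_add() is used for
 * the addition so that the 32-bit user-mode wrap-around rule is honoured on
 * MIPS64 targets.  Typical callers are the load/store generators below.
 */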
static target_ulong pc_relative_pc (DisasContext *ctx)
{
    target_ulong pc = ctx->pc;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int branch_bytes = ctx->hflags & MIPS_HFLAG_BDS16 ? 2 : 4;

    pc &= ~(target_ulong)3;
static void gen_ld (CPUMIPSState *env, DisasContext *ctx, uint32_t opc,
                    int rt, int base, int16_t offset)
{
    const char *opn = "ld";

    if (rt == 0 && env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F)) {
        /* Loongson CPU uses a load to zero register for prefetch.
           We emulate it as a NOP. On other CPU we must perform the
           actual memory access. */

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_base_offset_addr(ctx, t0, base, offset);

#if defined(TARGET_MIPS64)
        save_cpu_state(ctx, 0);
        op_ld_lwu(t0, t0, ctx);
        gen_store_gpr(t0, rt);

        save_cpu_state(ctx, 0);
        op_ld_ld(t0, t0, ctx);
        gen_store_gpr(t0, rt);

        save_cpu_state(ctx, 1);
        op_ld_lld(t0, t0, ctx);
        gen_store_gpr(t0, rt);

        save_cpu_state(ctx, 1);
        gen_load_gpr(t1, rt);
        gen_helper_3i(ldl, t1, t1, t0, ctx->mem_idx);
        gen_store_gpr(t1, rt);

        save_cpu_state(ctx, 1);
        gen_load_gpr(t1, rt);
        gen_helper_3i(ldr, t1, t1, t0, ctx->mem_idx);
        gen_store_gpr(t1, rt);

        save_cpu_state(ctx, 0);
        tcg_gen_movi_tl(t1, pc_relative_pc(ctx));
        gen_op_addr_add(ctx, t0, t0, t1);
        op_ld_ld(t0, t0, ctx);
        gen_store_gpr(t0, rt);

        save_cpu_state(ctx, 0);
        tcg_gen_movi_tl(t1, pc_relative_pc(ctx));
        gen_op_addr_add(ctx, t0, t0, t1);
        op_ld_lw(t0, t0, ctx);
        gen_store_gpr(t0, rt);

        save_cpu_state(ctx, 0);
        op_ld_lw(t0, t0, ctx);
        gen_store_gpr(t0, rt);

        save_cpu_state(ctx, 0);
        op_ld_lh(t0, t0, ctx);
        gen_store_gpr(t0, rt);

        save_cpu_state(ctx, 0);
        op_ld_lhu(t0, t0, ctx);
        gen_store_gpr(t0, rt);

        save_cpu_state(ctx, 0);
        op_ld_lb(t0, t0, ctx);
        gen_store_gpr(t0, rt);

        save_cpu_state(ctx, 0);
        op_ld_lbu(t0, t0, ctx);
        gen_store_gpr(t0, rt);

        save_cpu_state(ctx, 1);
        gen_load_gpr(t1, rt);
        gen_helper_3i(lwl, t1, t1, t0, ctx->mem_idx);
        gen_store_gpr(t1, rt);

        save_cpu_state(ctx, 1);
        gen_load_gpr(t1, rt);
        gen_helper_3i(lwr, t1, t1, t0, ctx->mem_idx);
        gen_store_gpr(t1, rt);

        save_cpu_state(ctx, 1);
        op_ld_ll(t0, t0, ctx);
        gen_store_gpr(t0, rt);

    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %d(%s)", opn, regnames[rt], offset, regnames[base]);
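/*
 * Note (added): in gen_ld() the aligned loads go through the op_ld_*
 * wrappers (plain tcg_gen_qemu_ld* ops), while the unaligned-access
 * instructions LWL/LWR/LDL/LDR merge with the previous register contents
 * and are therefore implemented with helpers that receive the old rt value.
 * The PC-relative LWPC/LDPC forms first add the word-aligned current PC,
 * obtained from pc_relative_pc(), to the computed offset.
 */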
static void gen_st (DisasContext *ctx, uint32_t opc, int rt,
                    int base, int16_t offset)
{
    const char *opn = "st";
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);
    gen_load_gpr(t1, rt);

#if defined(TARGET_MIPS64)
        save_cpu_state(ctx, 0);
        op_st_sd(t1, t0, ctx);

        save_cpu_state(ctx, 1);
        gen_helper_2i(sdl, t1, t0, ctx->mem_idx);

        save_cpu_state(ctx, 1);
        gen_helper_2i(sdr, t1, t0, ctx->mem_idx);

        save_cpu_state(ctx, 0);
        op_st_sw(t1, t0, ctx);

        save_cpu_state(ctx, 0);
        op_st_sh(t1, t0, ctx);

        save_cpu_state(ctx, 0);
        op_st_sb(t1, t0, ctx);

        save_cpu_state(ctx, 1);
        gen_helper_2i(swl, t1, t0, ctx->mem_idx);

        save_cpu_state(ctx, 1);
        gen_helper_2i(swr, t1, t0, ctx->mem_idx);

    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %d(%s)", opn, regnames[rt], offset, regnames[base]);
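/*
 * Note (added): gen_st() mirrors gen_ld(): aligned stores use the op_st_*
 * wrappers, whereas SWL/SWR/SDL/SDR go through helpers and save the full
 * CPU state first, presumably so the helper can raise an exception with an
 * accurate PC if the access faults.
 */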
/* Store conditional */
static void gen_st_cond (DisasContext *ctx, uint32_t opc, int rt,
                         int base, int16_t offset)
{
    const char *opn = "st_cond";

    t0 = tcg_temp_local_new();
    gen_base_offset_addr(ctx, t0, base, offset);
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */
    t1 = tcg_temp_local_new();
    gen_load_gpr(t1, rt);
#if defined(TARGET_MIPS64)
        save_cpu_state(ctx, 1);
        op_st_scd(t1, t0, rt, ctx);

        save_cpu_state(ctx, 1);
        op_st_sc(t1, t0, rt, ctx);

    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %d(%s)", opn, regnames[rt], offset, regnames[base]);
/* Load and store */
static void gen_flt_ldst (DisasContext *ctx, uint32_t opc, int ft,
                          int base, int16_t offset)
{
    const char *opn = "flt_ldst";
    TCGv t0 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */

        TCGv_i32 fp0 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32s(t0, t0, ctx->mem_idx);
        tcg_gen_trunc_tl_i32(fp0, t0);
        gen_store_fpr32(fp0, ft);
        tcg_temp_free_i32(fp0);

        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv t1 = tcg_temp_new();
        gen_load_fpr32(fp0, ft);
        tcg_gen_extu_i32_tl(t1, fp0);
        tcg_gen_qemu_st32(t1, t0, ctx->mem_idx);
        tcg_temp_free_i32(fp0);

        TCGv_i64 fp0 = tcg_temp_new_i64();
        tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
        gen_store_fpr64(ctx, fp0, ft);
        tcg_temp_free_i64(fp0);

        TCGv_i64 fp0 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, ft);
        tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
        tcg_temp_free_i64(fp0);

        generate_exception(ctx, EXCP_RI);

    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %d(%s)", opn, fregnames[ft], offset, regnames[base]);
static void gen_cop1_ldst(CPUMIPSState *env, DisasContext *ctx,
                          uint32_t op, int rt, int rs, int16_t imm)
{
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        check_cp1_enabled(ctx);
        gen_flt_ldst(ctx, op, rt, rs, imm);
    } else {
        generate_exception_err(ctx, EXCP_CpU, 1);
    }
}
/* Arithmetic with immediate operand */
static void gen_arith_imm (CPUMIPSState *env, DisasContext *ctx, uint32_t opc,
                           int rt, int rs, int16_t imm)
{
    target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */
    const char *opn = "imm arith";

    if (rt == 0 && opc != OPC_ADDI && opc != OPC_DADDI) {
        /* If no destination, treat it as a NOP.
           For addi, we must generate the overflow exception when needed. */

        TCGv t0 = tcg_temp_local_new();
        TCGv t1 = tcg_temp_new();
        TCGv t2 = tcg_temp_new();
        int l1 = gen_new_label();

        gen_load_gpr(t1, rs);
        tcg_gen_addi_tl(t0, t1, uimm);
        tcg_gen_ext32s_tl(t0, t0);

        tcg_gen_xori_tl(t1, t1, ~uimm);
        tcg_gen_xori_tl(t2, t0, uimm);
        tcg_gen_and_tl(t1, t1, t2);
        tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
        /* operands of same sign, result different sign */
        generate_exception(ctx, EXCP_OVERFLOW);
        tcg_gen_ext32s_tl(t0, t0);
        gen_store_gpr(t0, rt);

            tcg_gen_addi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
            tcg_gen_ext32s_tl(cpu_gpr[rt], cpu_gpr[rt]);
            tcg_gen_movi_tl(cpu_gpr[rt], uimm);
#if defined(TARGET_MIPS64)
        TCGv t0 = tcg_temp_local_new();
        TCGv t1 = tcg_temp_new();
        TCGv t2 = tcg_temp_new();
        int l1 = gen_new_label();

        gen_load_gpr(t1, rs);
        tcg_gen_addi_tl(t0, t1, uimm);

        tcg_gen_xori_tl(t1, t1, ~uimm);
        tcg_gen_xori_tl(t2, t0, uimm);
        tcg_gen_and_tl(t1, t1, t2);
        tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
        /* operands of same sign, result different sign */
        generate_exception(ctx, EXCP_OVERFLOW);
        gen_store_gpr(t0, rt);

            tcg_gen_addi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
            tcg_gen_movi_tl(cpu_gpr[rt], uimm);

    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
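/*
 * Worked example (added) for the OPC_ADDI overflow test above: with
 * rs = 0x7fffffff and imm = 1 the 32-bit sum is 0x80000000.  Both
 * t1 = rs ^ ~imm (operands share a sign) and t2 = sum ^ imm (the sum's sign
 * differs from the operands') end up negative, so t1 & t2 is negative, the
 * TCG_COND_GE branch is not taken and EXCP_OVERFLOW is raised, matching the
 * architectural ADDI behaviour.
 */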
/* Logic with immediate operand */
static void gen_logic_imm (CPUMIPSState *env, uint32_t opc, int rt, int rs, int16_t imm)
{
    const char *opn = "imm logic";

        /* If no destination, treat it as a NOP. */

    uimm = (uint16_t)imm;

        if (likely(rs != 0))
            tcg_gen_andi_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
            tcg_gen_movi_tl(cpu_gpr[rt], 0);

            tcg_gen_ori_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
            tcg_gen_movi_tl(cpu_gpr[rt], uimm);

        if (likely(rs != 0))
            tcg_gen_xori_tl(cpu_gpr[rt], cpu_gpr[rs], uimm);
            tcg_gen_movi_tl(cpu_gpr[rt], uimm);

        tcg_gen_movi_tl(cpu_gpr[rt], imm << 16);

    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
/* Set on less than with immediate operand */
static void gen_slt_imm (CPUMIPSState *env, uint32_t opc, int rt, int rs, int16_t imm)
{
    target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */
    const char *opn = "imm arith";

        /* If no destination, treat it as a NOP. */

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);
        tcg_gen_setcondi_tl(TCG_COND_LT, cpu_gpr[rt], t0, uimm);
        tcg_gen_setcondi_tl(TCG_COND_LTU, cpu_gpr[rt], t0, uimm);
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
/* Shifts with immediate operand */
static void gen_shift_imm(CPUMIPSState *env, DisasContext *ctx, uint32_t opc,
                          int rt, int rs, int16_t imm)
{
    target_ulong uimm = ((uint16_t)imm) & 0x1f;
    const char *opn = "imm shift";

        /* If no destination, treat it as a NOP. */

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);

        tcg_gen_shli_tl(t0, t0, uimm);
        tcg_gen_ext32s_tl(cpu_gpr[rt], t0);

        tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm);

            tcg_gen_ext32u_tl(t0, t0);
            tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm);
            tcg_gen_ext32s_tl(cpu_gpr[rt], t0);

            TCGv_i32 t1 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(t1, t0);
            tcg_gen_rotri_i32(t1, t1, uimm);
            tcg_gen_ext_i32_tl(cpu_gpr[rt], t1);
            tcg_temp_free_i32(t1);
            tcg_gen_ext32s_tl(cpu_gpr[rt], t0);
#if defined(TARGET_MIPS64)
        tcg_gen_shli_tl(cpu_gpr[rt], t0, uimm);
        tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm);
        tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm);
            tcg_gen_rotri_tl(cpu_gpr[rt], t0, uimm);
            tcg_gen_mov_tl(cpu_gpr[rt], t0);
        tcg_gen_shli_tl(cpu_gpr[rt], t0, uimm + 32);
        tcg_gen_sari_tl(cpu_gpr[rt], t0, uimm + 32);
        tcg_gen_shri_tl(cpu_gpr[rt], t0, uimm + 32);
        tcg_gen_rotri_tl(cpu_gpr[rt], t0, uimm + 32);

    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, " TARGET_FMT_lx, opn, regnames[rt], regnames[rs], uimm);
static void gen_arith (CPUMIPSState *env, DisasContext *ctx, uint32_t opc,
                       int rd, int rs, int rt)
{
    const char *opn = "arith";

    if (rd == 0 && opc != OPC_ADD && opc != OPC_SUB
       && opc != OPC_DADD && opc != OPC_DSUB) {
        /* If no destination, treat it as a NOP.
           For add & sub, we must generate the overflow exception when needed. */

        TCGv t0 = tcg_temp_local_new();
        TCGv t1 = tcg_temp_new();
        TCGv t2 = tcg_temp_new();
        int l1 = gen_new_label();

        gen_load_gpr(t1, rs);
        gen_load_gpr(t2, rt);
        tcg_gen_add_tl(t0, t1, t2);
        tcg_gen_ext32s_tl(t0, t0);
        tcg_gen_xor_tl(t1, t1, t2);
        tcg_gen_xor_tl(t2, t0, t2);
        tcg_gen_andc_tl(t1, t2, t1);
        tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
        /* operands of same sign, result different sign */
        generate_exception(ctx, EXCP_OVERFLOW);
        gen_store_gpr(t0, rd);

        if (rs != 0 && rt != 0) {
            tcg_gen_add_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
        } else if (rs == 0 && rt != 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
        } else if (rs != 0 && rt == 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);

        TCGv t0 = tcg_temp_local_new();
        TCGv t1 = tcg_temp_new();
        TCGv t2 = tcg_temp_new();
        int l1 = gen_new_label();

        gen_load_gpr(t1, rs);
        gen_load_gpr(t2, rt);
        tcg_gen_sub_tl(t0, t1, t2);
        tcg_gen_ext32s_tl(t0, t0);
        tcg_gen_xor_tl(t2, t1, t2);
        tcg_gen_xor_tl(t1, t0, t1);
        tcg_gen_and_tl(t1, t1, t2);
        tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
        /* operands of different sign, first operand and result different sign */
        generate_exception(ctx, EXCP_OVERFLOW);
        gen_store_gpr(t0, rd);

        if (rs != 0 && rt != 0) {
            tcg_gen_sub_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
        } else if (rs == 0 && rt != 0) {
            tcg_gen_neg_tl(cpu_gpr[rd], cpu_gpr[rt]);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
        } else if (rs != 0 && rt == 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);
#if defined(TARGET_MIPS64)
        TCGv t0 = tcg_temp_local_new();
        TCGv t1 = tcg_temp_new();
        TCGv t2 = tcg_temp_new();
        int l1 = gen_new_label();

        gen_load_gpr(t1, rs);
        gen_load_gpr(t2, rt);
        tcg_gen_add_tl(t0, t1, t2);
        tcg_gen_xor_tl(t1, t1, t2);
        tcg_gen_xor_tl(t2, t0, t2);
        tcg_gen_andc_tl(t1, t2, t1);
        tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
        /* operands of same sign, result different sign */
        generate_exception(ctx, EXCP_OVERFLOW);
        gen_store_gpr(t0, rd);

        if (rs != 0 && rt != 0) {
            tcg_gen_add_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
        } else if (rs == 0 && rt != 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
        } else if (rs != 0 && rt == 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);

        TCGv t0 = tcg_temp_local_new();
        TCGv t1 = tcg_temp_new();
        TCGv t2 = tcg_temp_new();
        int l1 = gen_new_label();

        gen_load_gpr(t1, rs);
        gen_load_gpr(t2, rt);
        tcg_gen_sub_tl(t0, t1, t2);
        tcg_gen_xor_tl(t2, t1, t2);
        tcg_gen_xor_tl(t1, t0, t1);
        tcg_gen_and_tl(t1, t1, t2);
        tcg_gen_brcondi_tl(TCG_COND_GE, t1, 0, l1);
        /* operands of different sign, first operand and result different sign */
        generate_exception(ctx, EXCP_OVERFLOW);
        gen_store_gpr(t0, rd);

        if (rs != 0 && rt != 0) {
            tcg_gen_sub_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
        } else if (rs == 0 && rt != 0) {
            tcg_gen_neg_tl(cpu_gpr[rd], cpu_gpr[rt]);
        } else if (rs != 0 && rt == 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);

        if (likely(rs != 0 && rt != 0)) {
            tcg_gen_mul_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
            tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);

    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
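/*
 * Note (added): throughout gen_arith() (and the other two-operand
 * generators below) the rs == 0 / rt == 0 combinations are special-cased
 * because the architectural $zero register always reads as 0; instead of
 * referencing cpu_gpr[0], the generator emits a simple move, negate or
 * constant store at translation time.
 */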
/* Conditional move */
static void gen_cond_move (CPUMIPSState *env, uint32_t opc, int rd, int rs, int rt)
{
    const char *opn = "cond move";

        /* If no destination, treat it as a NOP.
           For add & sub, we must generate the overflow exception when needed. */

    l1 = gen_new_label();
        if (likely(rt != 0))
            tcg_gen_brcondi_tl(TCG_COND_EQ, cpu_gpr[rt], 0, l1);
        if (likely(rt != 0))
            tcg_gen_brcondi_tl(TCG_COND_NE, cpu_gpr[rt], 0, l1);
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
        tcg_gen_movi_tl(cpu_gpr[rd], 0);

    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
static void gen_logic (CPUMIPSState *env, uint32_t opc, int rd, int rs, int rt)
{
    const char *opn = "logic";

        /* If no destination, treat it as a NOP. */

        if (likely(rs != 0 && rt != 0)) {
            tcg_gen_and_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);

        if (rs != 0 && rt != 0) {
            tcg_gen_nor_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
        } else if (rs == 0 && rt != 0) {
            tcg_gen_not_tl(cpu_gpr[rd], cpu_gpr[rt]);
        } else if (rs != 0 && rt == 0) {
            tcg_gen_not_tl(cpu_gpr[rd], cpu_gpr[rs]);
            tcg_gen_movi_tl(cpu_gpr[rd], ~((target_ulong)0));

        if (likely(rs != 0 && rt != 0)) {
            tcg_gen_or_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
        } else if (rs == 0 && rt != 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
        } else if (rs != 0 && rt == 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);

        if (likely(rs != 0 && rt != 0)) {
            tcg_gen_xor_tl(cpu_gpr[rd], cpu_gpr[rs], cpu_gpr[rt]);
        } else if (rs == 0 && rt != 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rt]);
        } else if (rs != 0 && rt == 0) {
            tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
            tcg_gen_movi_tl(cpu_gpr[rd], 0);

    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
/* Set on lower than */
static void gen_slt (CPUMIPSState *env, uint32_t opc, int rd, int rs, int rt)
{
    const char *opn = "slt";

        /* If no destination, treat it as a NOP. */

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
        tcg_gen_setcond_tl(TCG_COND_LT, cpu_gpr[rd], t0, t1);
        tcg_gen_setcond_tl(TCG_COND_LTU, cpu_gpr[rd], t0, t1);
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
static void gen_shift (CPUMIPSState *env, DisasContext *ctx, uint32_t opc,
                       int rd, int rs, int rt)
{
    const char *opn = "shifts";

        /* If no destination, treat it as a NOP.
           For add & sub, we must generate the overflow exception when needed. */

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);

        tcg_gen_andi_tl(t0, t0, 0x1f);
        tcg_gen_shl_tl(t0, t1, t0);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);

        tcg_gen_andi_tl(t0, t0, 0x1f);
        tcg_gen_sar_tl(cpu_gpr[rd], t1, t0);

        tcg_gen_ext32u_tl(t1, t1);
        tcg_gen_andi_tl(t0, t0, 0x1f);
        tcg_gen_shr_tl(t0, t1, t0);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);

            TCGv_i32 t2 = tcg_temp_new_i32();
            TCGv_i32 t3 = tcg_temp_new_i32();
            tcg_gen_trunc_tl_i32(t2, t0);
            tcg_gen_trunc_tl_i32(t3, t1);
            tcg_gen_andi_i32(t2, t2, 0x1f);
            tcg_gen_rotr_i32(t2, t3, t2);
            tcg_gen_ext_i32_tl(cpu_gpr[rd], t2);
            tcg_temp_free_i32(t2);
            tcg_temp_free_i32(t3);
#if defined(TARGET_MIPS64)
        tcg_gen_andi_tl(t0, t0, 0x3f);
        tcg_gen_shl_tl(cpu_gpr[rd], t1, t0);

        tcg_gen_andi_tl(t0, t0, 0x3f);
        tcg_gen_sar_tl(cpu_gpr[rd], t1, t0);

        tcg_gen_andi_tl(t0, t0, 0x3f);
        tcg_gen_shr_tl(cpu_gpr[rd], t1, t0);

        tcg_gen_andi_tl(t0, t0, 0x3f);
        tcg_gen_rotr_tl(cpu_gpr[rd], t1, t0);

    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
/* Arithmetic on HI/LO registers */
static void gen_HILO (DisasContext *ctx, uint32_t opc, int reg)
{
    const char *opn = "hilo";

    if (reg == 0 && (opc == OPC_MFHI || opc == OPC_MFLO)) {
        tcg_gen_mov_tl(cpu_gpr[reg], cpu_HI[0]);
        tcg_gen_mov_tl(cpu_gpr[reg], cpu_LO[0]);
        tcg_gen_mov_tl(cpu_HI[0], cpu_gpr[reg]);
        tcg_gen_movi_tl(cpu_HI[0], 0);
        tcg_gen_mov_tl(cpu_LO[0], cpu_gpr[reg]);
        tcg_gen_movi_tl(cpu_LO[0], 0);
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s", opn, regnames[reg]);
static void gen_muldiv (DisasContext *ctx, uint32_t opc,
    const char *opn = "mul/div";

#if defined(TARGET_MIPS64)
        t0 = tcg_temp_local_new();
        t1 = tcg_temp_local_new();
        t0 = tcg_temp_new();
        t1 = tcg_temp_new();

    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);

        int l1 = gen_new_label();
        int l2 = gen_new_label();

        tcg_gen_ext32s_tl(t0, t0);
        tcg_gen_ext32s_tl(t1, t1);
        tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
        tcg_gen_brcondi_tl(TCG_COND_NE, t0, INT_MIN, l2);
        tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1, l2);
        tcg_gen_mov_tl(cpu_LO[0], t0);
        tcg_gen_movi_tl(cpu_HI[0], 0);
        tcg_gen_div_tl(cpu_LO[0], t0, t1);
        tcg_gen_rem_tl(cpu_HI[0], t0, t1);
        tcg_gen_ext32s_tl(cpu_LO[0], cpu_LO[0]);
        tcg_gen_ext32s_tl(cpu_HI[0], cpu_HI[0]);

        int l1 = gen_new_label();

        tcg_gen_ext32u_tl(t0, t0);
        tcg_gen_ext32u_tl(t1, t1);
        tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
        tcg_gen_divu_tl(cpu_LO[0], t0, t1);
        tcg_gen_remu_tl(cpu_HI[0], t0, t1);
        tcg_gen_ext32s_tl(cpu_LO[0], cpu_LO[0]);
        tcg_gen_ext32s_tl(cpu_HI[0], cpu_HI[0]);

        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();

        tcg_gen_ext_tl_i64(t2, t0);
        tcg_gen_ext_tl_i64(t3, t1);
        tcg_gen_mul_i64(t2, t2, t3);
        tcg_temp_free_i64(t3);
        tcg_gen_trunc_i64_tl(t0, t2);
        tcg_gen_shri_i64(t2, t2, 32);
        tcg_gen_trunc_i64_tl(t1, t2);
        tcg_temp_free_i64(t2);
        tcg_gen_ext32s_tl(cpu_LO[0], t0);
        tcg_gen_ext32s_tl(cpu_HI[0], t1);

        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();

        tcg_gen_ext32u_tl(t0, t0);
        tcg_gen_ext32u_tl(t1, t1);
        tcg_gen_extu_tl_i64(t2, t0);
        tcg_gen_extu_tl_i64(t3, t1);
        tcg_gen_mul_i64(t2, t2, t3);
        tcg_temp_free_i64(t3);
        tcg_gen_trunc_i64_tl(t0, t2);
        tcg_gen_shri_i64(t2, t2, 32);
        tcg_gen_trunc_i64_tl(t1, t2);
        tcg_temp_free_i64(t2);
        tcg_gen_ext32s_tl(cpu_LO[0], t0);
        tcg_gen_ext32s_tl(cpu_HI[0], t1);
#if defined(TARGET_MIPS64)
        int l1 = gen_new_label();
        int l2 = gen_new_label();

        tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
        tcg_gen_brcondi_tl(TCG_COND_NE, t0, -1LL << 63, l2);
        tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1LL, l2);
        tcg_gen_mov_tl(cpu_LO[0], t0);
        tcg_gen_movi_tl(cpu_HI[0], 0);
        tcg_gen_div_i64(cpu_LO[0], t0, t1);
        tcg_gen_rem_i64(cpu_HI[0], t0, t1);

        int l1 = gen_new_label();

        tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
        tcg_gen_divu_i64(cpu_LO[0], t0, t1);
        tcg_gen_remu_i64(cpu_HI[0], t0, t1);

        gen_helper_dmult(t0, t1);
        gen_helper_dmultu(t0, t1);

        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();

        tcg_gen_ext_tl_i64(t2, t0);
        tcg_gen_ext_tl_i64(t3, t1);
        tcg_gen_mul_i64(t2, t2, t3);
        tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
        tcg_gen_add_i64(t2, t2, t3);
        tcg_temp_free_i64(t3);
        tcg_gen_trunc_i64_tl(t0, t2);
        tcg_gen_shri_i64(t2, t2, 32);
        tcg_gen_trunc_i64_tl(t1, t2);
        tcg_temp_free_i64(t2);
        tcg_gen_ext32s_tl(cpu_LO[0], t0);
        tcg_gen_ext32s_tl(cpu_HI[0], t1);

        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();

        tcg_gen_ext32u_tl(t0, t0);
        tcg_gen_ext32u_tl(t1, t1);
        tcg_gen_extu_tl_i64(t2, t0);
        tcg_gen_extu_tl_i64(t3, t1);
        tcg_gen_mul_i64(t2, t2, t3);
        tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
        tcg_gen_add_i64(t2, t2, t3);
        tcg_temp_free_i64(t3);
        tcg_gen_trunc_i64_tl(t0, t2);
        tcg_gen_shri_i64(t2, t2, 32);
        tcg_gen_trunc_i64_tl(t1, t2);
        tcg_temp_free_i64(t2);
        tcg_gen_ext32s_tl(cpu_LO[0], t0);
        tcg_gen_ext32s_tl(cpu_HI[0], t1);

        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();

        tcg_gen_ext_tl_i64(t2, t0);
        tcg_gen_ext_tl_i64(t3, t1);
        tcg_gen_mul_i64(t2, t2, t3);
        tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
        tcg_gen_sub_i64(t2, t3, t2);
        tcg_temp_free_i64(t3);
        tcg_gen_trunc_i64_tl(t0, t2);
        tcg_gen_shri_i64(t2, t2, 32);
        tcg_gen_trunc_i64_tl(t1, t2);
        tcg_temp_free_i64(t2);
        tcg_gen_ext32s_tl(cpu_LO[0], t0);
        tcg_gen_ext32s_tl(cpu_HI[0], t1);

        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGv_i64 t3 = tcg_temp_new_i64();

        tcg_gen_ext32u_tl(t0, t0);
        tcg_gen_ext32u_tl(t1, t1);
        tcg_gen_extu_tl_i64(t2, t0);
        tcg_gen_extu_tl_i64(t3, t1);
        tcg_gen_mul_i64(t2, t2, t3);
        tcg_gen_concat_tl_i64(t3, cpu_LO[0], cpu_HI[0]);
        tcg_gen_sub_i64(t2, t3, t2);
        tcg_temp_free_i64(t3);
        tcg_gen_trunc_i64_tl(t0, t2);
        tcg_gen_shri_i64(t2, t2, 32);
        tcg_gen_trunc_i64_tl(t1, t2);
        tcg_temp_free_i64(t2);
        tcg_gen_ext32s_tl(cpu_LO[0], t0);
        tcg_gen_ext32s_tl(cpu_HI[0], t1);

        generate_exception(ctx, EXCP_RI);

    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s %s", opn, regnames[rs], regnames[rt]);
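/*
 * Note (added): the DIV/DIVU (and DDIV/DDIVU) cases above branch around the
 * TCG division ops when the divisor is zero, leaving HI/LO unmodified,
 * which is acceptable because the MIPS result is UNPREDICTABLE in that
 * case; the INT_MIN / -1 signed-overflow case is handled explicitly by
 * writing the dividend to LO and 0 to HI before skipping the division.
 */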
static void gen_mul_vr54xx (DisasContext *ctx, uint32_t opc,
                            int rd, int rs, int rt)
{
    const char *opn = "mul vr54xx";
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);

    case OPC_VR54XX_MULS:
        gen_helper_muls(t0, t0, t1);
    case OPC_VR54XX_MULSU:
        gen_helper_mulsu(t0, t0, t1);
    case OPC_VR54XX_MACC:
        gen_helper_macc(t0, t0, t1);
    case OPC_VR54XX_MACCU:
        gen_helper_maccu(t0, t0, t1);
    case OPC_VR54XX_MSAC:
        gen_helper_msac(t0, t0, t1);
    case OPC_VR54XX_MSACU:
        gen_helper_msacu(t0, t0, t1);
    case OPC_VR54XX_MULHI:
        gen_helper_mulhi(t0, t0, t1);
    case OPC_VR54XX_MULHIU:
        gen_helper_mulhiu(t0, t0, t1);
    case OPC_VR54XX_MULSHI:
        gen_helper_mulshi(t0, t0, t1);
    case OPC_VR54XX_MULSHIU:
        gen_helper_mulshiu(t0, t0, t1);
    case OPC_VR54XX_MACCHI:
        gen_helper_macchi(t0, t0, t1);
    case OPC_VR54XX_MACCHIU:
        gen_helper_macchiu(t0, t0, t1);
    case OPC_VR54XX_MSACHI:
        gen_helper_msachi(t0, t0, t1);
    case OPC_VR54XX_MSACHIU:
        gen_helper_msachiu(t0, t0, t1);
        MIPS_INVAL("mul vr54xx");
        generate_exception(ctx, EXCP_RI);

    gen_store_gpr(t0, rd);
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, %s", opn, regnames[rd], regnames[rs], regnames[rt]);
static void gen_cl (DisasContext *ctx, uint32_t opc,
    const char *opn = "CLx";

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);
        gen_helper_clo(cpu_gpr[rd], t0);
        gen_helper_clz(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        gen_helper_dclo(cpu_gpr[rd], t0);
        gen_helper_dclz(cpu_gpr[rd], t0);
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s", opn, regnames[rd], regnames[rs]);
/* Godson integer instructions */
static void gen_loongson_integer (DisasContext *ctx, uint32_t opc,
                                  int rd, int rs, int rt)

    const char *opn = "loongson";

    case OPC_MULTU_G_2E:
    case OPC_MULTU_G_2F:
#if defined(TARGET_MIPS64)
    case OPC_DMULT_G_2E:
    case OPC_DMULT_G_2F:
    case OPC_DMULTU_G_2E:
    case OPC_DMULTU_G_2F:

        t0 = tcg_temp_new();
        t1 = tcg_temp_new();

        t0 = tcg_temp_local_new();
        t1 = tcg_temp_local_new();

    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);

        tcg_gen_mul_tl(cpu_gpr[rd], t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);

    case OPC_MULTU_G_2E:
    case OPC_MULTU_G_2F:
        tcg_gen_ext32u_tl(t0, t0);
        tcg_gen_ext32u_tl(t1, t1);
        tcg_gen_mul_tl(cpu_gpr[rd], t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);

        int l1 = gen_new_label();
        int l2 = gen_new_label();
        int l3 = gen_new_label();
        tcg_gen_ext32s_tl(t0, t0);
        tcg_gen_ext32s_tl(t1, t1);
        tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
        tcg_gen_movi_tl(cpu_gpr[rd], 0);

        tcg_gen_brcondi_tl(TCG_COND_NE, t0, INT_MIN, l2);
        tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1, l2);
        tcg_gen_mov_tl(cpu_gpr[rd], t0);

        tcg_gen_div_tl(cpu_gpr[rd], t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);

        int l1 = gen_new_label();
        int l2 = gen_new_label();
        tcg_gen_ext32u_tl(t0, t0);
        tcg_gen_ext32u_tl(t1, t1);
        tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
        tcg_gen_movi_tl(cpu_gpr[rd], 0);

        tcg_gen_divu_tl(cpu_gpr[rd], t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);

        int l1 = gen_new_label();
        int l2 = gen_new_label();
        int l3 = gen_new_label();
        tcg_gen_ext32u_tl(t0, t0);
        tcg_gen_ext32u_tl(t1, t1);
        tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
        tcg_gen_brcondi_tl(TCG_COND_NE, t0, INT_MIN, l2);
        tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1, l2);

        tcg_gen_movi_tl(cpu_gpr[rd], 0);

        tcg_gen_rem_tl(cpu_gpr[rd], t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);

        int l1 = gen_new_label();
        int l2 = gen_new_label();
        tcg_gen_ext32u_tl(t0, t0);
        tcg_gen_ext32u_tl(t1, t1);
        tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
        tcg_gen_movi_tl(cpu_gpr[rd], 0);

        tcg_gen_remu_tl(cpu_gpr[rd], t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);

#if defined(TARGET_MIPS64)
    case OPC_DMULT_G_2E:
    case OPC_DMULT_G_2F:
        tcg_gen_mul_tl(cpu_gpr[rd], t0, t1);

    case OPC_DMULTU_G_2E:
    case OPC_DMULTU_G_2F:
        tcg_gen_mul_tl(cpu_gpr[rd], t0, t1);

        int l1 = gen_new_label();
        int l2 = gen_new_label();
        int l3 = gen_new_label();
        tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
        tcg_gen_movi_tl(cpu_gpr[rd], 0);

        tcg_gen_brcondi_tl(TCG_COND_NE, t0, -1LL << 63, l2);
        tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1LL, l2);
        tcg_gen_mov_tl(cpu_gpr[rd], t0);

        tcg_gen_div_tl(cpu_gpr[rd], t0, t1);

    case OPC_DDIVU_G_2E:
    case OPC_DDIVU_G_2F:

        int l1 = gen_new_label();
        int l2 = gen_new_label();
        tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
        tcg_gen_movi_tl(cpu_gpr[rd], 0);

        tcg_gen_divu_tl(cpu_gpr[rd], t0, t1);

        int l1 = gen_new_label();
        int l2 = gen_new_label();
        int l3 = gen_new_label();
        tcg_gen_brcondi_tl(TCG_COND_EQ, t1, 0, l1);
        tcg_gen_brcondi_tl(TCG_COND_NE, t0, -1LL << 63, l2);
        tcg_gen_brcondi_tl(TCG_COND_NE, t1, -1LL, l2);

        tcg_gen_movi_tl(cpu_gpr[rd], 0);

        tcg_gen_rem_tl(cpu_gpr[rd], t0, t1);

    case OPC_DMODU_G_2E:
    case OPC_DMODU_G_2F:

        int l1 = gen_new_label();
        int l2 = gen_new_label();
        tcg_gen_brcondi_tl(TCG_COND_NE, t1, 0, l1);
        tcg_gen_movi_tl(cpu_gpr[rd], 0);

        tcg_gen_remu_tl(cpu_gpr[rd], t0, t1);

    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s", opn, regnames[rd], regnames[rs]);
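
/*
 * Added commentary: the Loongson divide/modulo cases above wrap the host
 * div/rem TCG ops in explicit guards so undefined host behaviour is never
 * reached.  A zero divisor makes rd 0, and the signed overflow case
 * (INT_MIN divided by -1, or its 64-bit equivalent) is special-cased to
 * return the dividend for the divisions and 0 for the modulo operations.
 */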
static void gen_trap (DisasContext *ctx, uint32_t opc,
                      int rs, int rt, int16_t imm)

    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    /* Load needed operands */

        /* Compare two registers */

        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);

        /* Compare register to immediate */
        if (rs != 0 || imm != 0) {
            gen_load_gpr(t0, rs);
            tcg_gen_movi_tl(t1, (int32_t)imm);

        case OPC_TEQ:   /* rs == rs */
        case OPC_TEQI:  /* r0 == 0  */
        case OPC_TGE:   /* rs >= rs */
        case OPC_TGEI:  /* r0 >= 0  */
        case OPC_TGEU:  /* rs >= rs unsigned */
        case OPC_TGEIU: /* r0 >= 0  unsigned */

            generate_exception(ctx, EXCP_TRAP);

        case OPC_TLT:   /* rs < rs          */
        case OPC_TLTI:  /* r0 < 0           */
        case OPC_TLTU:  /* rs < rs unsigned */
        case OPC_TLTIU: /* r0 < 0  unsigned */
        case OPC_TNE:   /* rs != rs         */
        case OPC_TNEI:  /* r0 != 0          */
            /* Never trap: treat as NOP. */

        int l1 = gen_new_label();

            tcg_gen_brcond_tl(TCG_COND_NE, t0, t1, l1);

            tcg_gen_brcond_tl(TCG_COND_LT, t0, t1, l1);

            tcg_gen_brcond_tl(TCG_COND_LTU, t0, t1, l1);

            tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);

            tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);

            tcg_gen_brcond_tl(TCG_COND_EQ, t0, t1, l1);

        generate_exception(ctx, EXCP_TRAP);
static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)

    TranslationBlock *tb;

    if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) &&
        likely(!ctx->singlestep_enabled)) {

        tcg_gen_exit_tb((tcg_target_long)tb + n);

        if (ctx->singlestep_enabled) {
            save_cpu_state(ctx, 0);
            gen_helper_0i(raise_exception, EXCP_DEBUG);
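
/*
 * Added commentary: gen_goto_tb() is QEMU's usual direct-block-chaining
 * helper.  When the destination stays on the same guest page and
 * single-stepping is disabled, the generated code exits with (tb + n) so the
 * execution loop can later patch in a direct jump to the next translation
 * block; otherwise it takes the slow path, raising EXCP_DEBUG first when
 * single-stepping is enabled.
 */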
/* Branches (before delay slot) */
static void gen_compute_branch (DisasContext *ctx, uint32_t opc,
                                int rs, int rt, int32_t offset)

    target_ulong btgt = -1;

    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay slot at PC 0x" TARGET_FMT_lx "\n", ctx->pc);

        generate_exception(ctx, EXCP_RI);

    /* Load needed operands */

        /* Compare two registers */

        gen_load_gpr(t0, rs);
        gen_load_gpr(t1, rt);

        btgt = ctx->pc + insn_bytes + offset;

        /* Compare to zero */

        gen_load_gpr(t0, rs);

        btgt = ctx->pc + insn_bytes + offset;

        /* Jump to immediate */
        btgt = ((ctx->pc + insn_bytes) & (int32_t)0xF0000000) | (uint32_t)offset;

        /* Jump to register */
        if (offset != 0 && offset != 16) {
            /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
               others are reserved. */
            MIPS_INVAL("jump hint");
            generate_exception(ctx, EXCP_RI);

        gen_load_gpr(btarget, rs);

        MIPS_INVAL("branch/jump");
        generate_exception(ctx, EXCP_RI);

    if (bcond_compute == 0) {
        /* No condition to be computed */

        case OPC_BEQ:     /* rx == rx        */
        case OPC_BEQL:    /* rx == rx likely */
        case OPC_BGEZ:    /* 0 >= 0          */
        case OPC_BGEZL:   /* 0 >= 0 likely   */
        case OPC_BLEZ:    /* 0 <= 0          */
        case OPC_BLEZL:   /* 0 <= 0 likely   */

            ctx->hflags |= MIPS_HFLAG_B;
            MIPS_DEBUG("balways");

        case OPC_BGEZAL:  /* 0 >= 0          */
        case OPC_BGEZALL: /* 0 >= 0 likely   */
            ctx->hflags |= (opc == OPC_BGEZALS
                            ? MIPS_HFLAG_BDS16
                            : MIPS_HFLAG_BDS32);
            /* Always take and link */

            ctx->hflags |= MIPS_HFLAG_B;
            MIPS_DEBUG("balways and link");

        case OPC_BNE:     /* rx != rx */
        case OPC_BGTZ:    /* 0 > 0    */
        case OPC_BLTZ:    /* 0 < 0    */

            MIPS_DEBUG("bnever (NOP)");

        case OPC_BLTZAL:  /* 0 < 0 */
            ctx->hflags |= (opc == OPC_BLTZALS
                            ? MIPS_HFLAG_BDS16
                            : MIPS_HFLAG_BDS32);
            /* Handle as an unconditional branch to get correct delay
               slot handling.  */

            btgt = ctx->pc + (opc == OPC_BLTZALS ? 6 : 8);
            ctx->hflags |= MIPS_HFLAG_B;
            MIPS_DEBUG("bnever and link");

        case OPC_BLTZALL: /* 0 < 0 likely */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 8);
            /* Skip the instruction in the delay slot */
            MIPS_DEBUG("bnever, link and skip");

        case OPC_BNEL:    /* rx != rx likely */
        case OPC_BGTZL:   /* 0 > 0 likely    */
        case OPC_BLTZL:   /* 0 < 0 likely    */
            /* Skip the instruction in the delay slot */
            MIPS_DEBUG("bnever and skip");

            ctx->hflags |= MIPS_HFLAG_B;
            MIPS_DEBUG("j " TARGET_FMT_lx, btgt);

            ctx->hflags |= MIPS_HFLAG_BX;

            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= ((opc == OPC_JALS || opc == OPC_JALXS)
                            ? MIPS_HFLAG_BDS16
                            : MIPS_HFLAG_BDS32);
            MIPS_DEBUG("jal " TARGET_FMT_lx, btgt);

            ctx->hflags |= MIPS_HFLAG_BR;
            if (insn_bytes == 4)
                ctx->hflags |= MIPS_HFLAG_BDS32;
            MIPS_DEBUG("jr %s", regnames[rs]);

            ctx->hflags |= MIPS_HFLAG_BR;
            ctx->hflags |= (opc == OPC_JALRS
                            ? MIPS_HFLAG_BDS16
                            : MIPS_HFLAG_BDS32);
            MIPS_DEBUG("jalr %s, %s", regnames[rt], regnames[rs]);

            MIPS_INVAL("branch/jump");
            generate_exception(ctx, EXCP_RI);

            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            MIPS_DEBUG("beq %s, %s, " TARGET_FMT_lx,
                       regnames[rs], regnames[rt], btgt);

            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            MIPS_DEBUG("beql %s, %s, " TARGET_FMT_lx,
                       regnames[rs], regnames[rt], btgt);

            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            MIPS_DEBUG("bne %s, %s, " TARGET_FMT_lx,
                       regnames[rs], regnames[rt], btgt);

            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            MIPS_DEBUG("bnel %s, %s, " TARGET_FMT_lx,
                       regnames[rs], regnames[rt], btgt);

            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            MIPS_DEBUG("bgez %s, " TARGET_FMT_lx, regnames[rs], btgt);

            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            MIPS_DEBUG("bgezl %s, " TARGET_FMT_lx, regnames[rs], btgt);

            ctx->hflags |= (opc == OPC_BGEZALS
                            ? MIPS_HFLAG_BDS16
                            : MIPS_HFLAG_BDS32);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            MIPS_DEBUG("bgezal %s, " TARGET_FMT_lx, regnames[rs], btgt);

            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);

            MIPS_DEBUG("bgezall %s, " TARGET_FMT_lx, regnames[rs], btgt);

            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            MIPS_DEBUG("bgtz %s, " TARGET_FMT_lx, regnames[rs], btgt);

            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            MIPS_DEBUG("bgtzl %s, " TARGET_FMT_lx, regnames[rs], btgt);

            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            MIPS_DEBUG("blez %s, " TARGET_FMT_lx, regnames[rs], btgt);

            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            MIPS_DEBUG("blezl %s, " TARGET_FMT_lx, regnames[rs], btgt);

            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            MIPS_DEBUG("bltz %s, " TARGET_FMT_lx, regnames[rs], btgt);

            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            MIPS_DEBUG("bltzl %s, " TARGET_FMT_lx, regnames[rs], btgt);

            ctx->hflags |= (opc == OPC_BLTZALS
                            ? MIPS_HFLAG_BDS16
                            : MIPS_HFLAG_BDS32);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);

            MIPS_DEBUG("bltzal %s, " TARGET_FMT_lx, regnames[rs], btgt);

            ctx->hflags |= MIPS_HFLAG_BC;

            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);

            MIPS_DEBUG("bltzall %s, " TARGET_FMT_lx, regnames[rs], btgt);

            ctx->hflags |= MIPS_HFLAG_BL;

            MIPS_INVAL("conditional branch/jump");
            generate_exception(ctx, EXCP_RI);

    MIPS_DEBUG("enter ds: link %d cond %02x target " TARGET_FMT_lx,
               blink, ctx->hflags, btgt);

    ctx->btarget = btgt;

        int post_delay = insn_bytes;
        int lowbit = !!(ctx->hflags & MIPS_HFLAG_M16);

        if (opc != OPC_JALRC)
            post_delay += ((ctx->hflags & MIPS_HFLAG_BDS16) ? 2 : 4);

        tcg_gen_movi_tl(cpu_gpr[blink], ctx->pc + post_delay + lowbit);

    if (insn_bytes == 2)
        ctx->hflags |= MIPS_HFLAG_B16;
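
/*
 * Added commentary (a sketch of the surrounding control flow, based on the
 * hflags usage visible above): gen_compute_branch() does not emit the
 * control transfer itself.  It records the branch kind in ctx->hflags
 * (MIPS_HFLAG_B for unconditionally taken, _BC/_BL for conditional and
 * "likely" forms, _BR/_BX for register and ISA-switching jumps), stores the
 * target in ctx->btarget or in the btarget global for register jumps, and
 * writes the link register; the delay slot is translated next and the
 * branch is resolved afterwards.
 */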
/* special3 bitfield operations */
static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
                        int rs, int lsb, int msb)

    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t1, rs);

        tcg_gen_shri_tl(t0, t1, lsb);

        tcg_gen_andi_tl(t0, t0, (1 << (msb + 1)) - 1);

        tcg_gen_ext32s_tl(t0, t0);

#if defined(TARGET_MIPS64)

        tcg_gen_shri_tl(t0, t1, lsb);

        tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1 + 32)) - 1);

        tcg_gen_shri_tl(t0, t1, lsb + 32);
        tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);

        tcg_gen_shri_tl(t0, t1, lsb);
        tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);

        mask = ((msb - lsb + 1 < 32) ? ((1 << (msb - lsb + 1)) - 1) : ~0) << lsb;
        gen_load_gpr(t0, rt);
        tcg_gen_andi_tl(t0, t0, ~mask);
        tcg_gen_shli_tl(t1, t1, lsb);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_ext32s_tl(t0, t0);

#if defined(TARGET_MIPS64)

        mask = ((msb - lsb + 1 + 32 < 64) ? ((1ULL << (msb - lsb + 1 + 32)) - 1) : ~0ULL) << lsb;
        gen_load_gpr(t0, rt);
        tcg_gen_andi_tl(t0, t0, ~mask);
        tcg_gen_shli_tl(t1, t1, lsb);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_or_tl(t0, t0, t1);

        mask = ((1ULL << (msb - lsb + 1)) - 1) << (lsb + 32);
        gen_load_gpr(t0, rt);
        tcg_gen_andi_tl(t0, t0, ~mask);
        tcg_gen_shli_tl(t1, t1, lsb + 32);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_or_tl(t0, t0, t1);

        gen_load_gpr(t0, rt);
        mask = ((1ULL << (msb - lsb + 1)) - 1) << lsb;
        gen_load_gpr(t0, rt);
        tcg_gen_andi_tl(t0, t0, ~mask);
        tcg_gen_shli_tl(t1, t1, lsb);
        tcg_gen_andi_tl(t1, t1, mask);
        tcg_gen_or_tl(t0, t0, t1);

        MIPS_INVAL("bitops");
        generate_exception(ctx, EXCP_RI);

    gen_store_gpr(t0, rt);
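
/*
 * Added commentary with a worked example: in the 32-bit INS path above the
 * field is described by lsb and msb straight from the instruction, and
 *     mask = ((1 << (msb - lsb + 1)) - 1) << lsb
 * selects the destination bits.  With lsb = 4 and msb = 11 (an 8-bit field)
 * this gives mask = 0xFF0: rt keeps everything outside bits 11..4 while rs,
 * shifted left by lsb, supplies the new field contents.
 */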
static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)

    /* If no destination, treat it as a NOP. */

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);

        TCGv t1 = tcg_temp_new();

        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_andi_tl(t1, t1, 0x00FF00FF);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_andi_tl(t0, t0, ~0x00FF00FF);
        tcg_gen_or_tl(t0, t0, t1);

        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);

        tcg_gen_ext8s_tl(cpu_gpr[rd], t0);

        tcg_gen_ext16s_tl(cpu_gpr[rd], t0);

#if defined(TARGET_MIPS64)

        TCGv t1 = tcg_temp_new();

        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_andi_tl(t1, t1, 0x00FF00FF00FF00FFULL);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_andi_tl(t0, t0, ~0x00FF00FF00FF00FFULL);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);

        TCGv t1 = tcg_temp_new();

        tcg_gen_shri_tl(t1, t0, 16);
        tcg_gen_andi_tl(t1, t1, 0x0000FFFF0000FFFFULL);
        tcg_gen_shli_tl(t0, t0, 16);
        tcg_gen_andi_tl(t0, t0, ~0x0000FFFF0000FFFFULL);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_shri_tl(t1, t0, 32);
        tcg_gen_shli_tl(t0, t0, 32);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);

        MIPS_INVAL("bsfhl");
        generate_exception(ctx, EXCP_RI);
#ifndef CONFIG_USER_ONLY
/* CP0 (MMU and control) */
static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)

    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);

static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)

    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);

static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)

    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);

static inline void gen_mtc0_store64 (TCGv arg, target_ulong off)

    tcg_gen_ext32s_tl(arg, arg);
    tcg_gen_st_tl(arg, cpu_env, off);
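
/*
 * Added commentary: these helpers encode the width convention used by the
 * CP0 accessors that follow.  32-bit CP0 fields are read and written through
 * a TCGv_i32 temporary, while values moved through the 32-bit interface are
 * sign-extended into the target register (tcg_gen_ext32s_tl), matching the
 * MIPS rule that MFC0 results are sign-extended on 64-bit cores.
 */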
3174 static void gen_mfc0 (CPUMIPSState
*env
, DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
3176 const char *rn
= "invalid";
3179 check_insn(env
, ctx
, ISA_MIPS32
);
3185 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
3189 check_insn(env
, ctx
, ASE_MT
);
3190 gen_helper_mfc0_mvpcontrol(arg
);
3194 check_insn(env
, ctx
, ASE_MT
);
3195 gen_helper_mfc0_mvpconf0(arg
);
3199 check_insn(env
, ctx
, ASE_MT
);
3200 gen_helper_mfc0_mvpconf1(arg
);
3210 gen_helper_mfc0_random(arg
);
3214 check_insn(env
, ctx
, ASE_MT
);
3215 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
3219 check_insn(env
, ctx
, ASE_MT
);
3220 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
3224 check_insn(env
, ctx
, ASE_MT
);
3225 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
3229 check_insn(env
, ctx
, ASE_MT
);
3230 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
3234 check_insn(env
, ctx
, ASE_MT
);
3235 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
3239 check_insn(env
, ctx
, ASE_MT
);
3240 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
3241 rn
= "VPEScheFBack";
3244 check_insn(env
, ctx
, ASE_MT
);
3245 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
3255 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
3256 tcg_gen_ext32s_tl(arg
, arg
);
3260 check_insn(env
, ctx
, ASE_MT
);
3261 gen_helper_mfc0_tcstatus(arg
);
3265 check_insn(env
, ctx
, ASE_MT
);
3266 gen_helper_mfc0_tcbind(arg
);
3270 check_insn(env
, ctx
, ASE_MT
);
3271 gen_helper_mfc0_tcrestart(arg
);
3275 check_insn(env
, ctx
, ASE_MT
);
3276 gen_helper_mfc0_tchalt(arg
);
3280 check_insn(env
, ctx
, ASE_MT
);
3281 gen_helper_mfc0_tccontext(arg
);
3285 check_insn(env
, ctx
, ASE_MT
);
3286 gen_helper_mfc0_tcschedule(arg
);
3290 check_insn(env
, ctx
, ASE_MT
);
3291 gen_helper_mfc0_tcschefback(arg
);
3301 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
3302 tcg_gen_ext32s_tl(arg
, arg
);
3312 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
3313 tcg_gen_ext32s_tl(arg
, arg
);
3317 // gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
3318 rn
= "ContextConfig";
3327 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
3331 check_insn(env
, ctx
, ISA_MIPS32R2
);
3332 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
3342 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
3346 check_insn(env
, ctx
, ISA_MIPS32R2
);
3347 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
3351 check_insn(env
, ctx
, ISA_MIPS32R2
);
3352 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
3356 check_insn(env
, ctx
, ISA_MIPS32R2
);
3357 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
3361 check_insn(env
, ctx
, ISA_MIPS32R2
);
3362 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
3366 check_insn(env
, ctx
, ISA_MIPS32R2
);
3367 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
3377 check_insn(env
, ctx
, ISA_MIPS32R2
);
3378 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
3388 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
3389 tcg_gen_ext32s_tl(arg
, arg
);
3399 /* Mark as an IO operation because we read the time. */
3402 gen_helper_mfc0_count(arg
);
3406 /* Break the TB to be able to take timer interrupts immediately
3407 after reading count. */
3408 ctx
->bstate
= BS_STOP
;
3411 /* 6,7 are implementation dependent */
3419 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
3420 tcg_gen_ext32s_tl(arg
, arg
);
3430 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
3433 /* 6,7 are implementation dependent */
3441 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
3445 check_insn(env
, ctx
, ISA_MIPS32R2
);
3446 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
3450 check_insn(env
, ctx
, ISA_MIPS32R2
);
3451 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
3455 check_insn(env
, ctx
, ISA_MIPS32R2
);
3456 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
3466 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
3476 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
3477 tcg_gen_ext32s_tl(arg
, arg
);
3487 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
3491 check_insn(env
, ctx
, ISA_MIPS32R2
);
3492 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
3502 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
3506 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
3510 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
3514 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
3517 /* 4,5 are reserved */
3518 /* 6,7 are implementation dependent */
3520 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
3524 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
3534 gen_helper_mfc0_lladdr(arg
);
3544 gen_helper_1i(mfc0_watchlo
, arg
, sel
);
3554 gen_helper_1i(mfc0_watchhi
, arg
, sel
);
3564 #if defined(TARGET_MIPS64)
3565 check_insn(env
, ctx
, ISA_MIPS3
);
3566 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
3567 tcg_gen_ext32s_tl(arg
, arg
);
3576 /* Officially reserved, but sel 0 is used for R1x000 framemask */
3579 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
3587 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
3588 rn
= "'Diagnostic"; /* implementation dependent */
3593 gen_helper_mfc0_debug(arg
); /* EJTAG support */
3597 // gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
3598 rn
= "TraceControl";
3601 // gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
3602 rn
= "TraceControl2";
3605 // gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
3606 rn
= "UserTraceData";
3609 // gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
3620 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
3621 tcg_gen_ext32s_tl(arg
, arg
);
3631 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
3632 rn
= "Performance0";
3635 // gen_helper_mfc0_performance1(arg);
3636 rn
= "Performance1";
3639 // gen_helper_mfc0_performance2(arg);
3640 rn
= "Performance2";
3643 // gen_helper_mfc0_performance3(arg);
3644 rn
= "Performance3";
3647 // gen_helper_mfc0_performance4(arg);
3648 rn
= "Performance4";
3651 // gen_helper_mfc0_performance5(arg);
3652 rn
= "Performance5";
3655 // gen_helper_mfc0_performance6(arg);
3656 rn
= "Performance6";
3659 // gen_helper_mfc0_performance7(arg);
3660 rn
= "Performance7";
3667 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
3673 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
3686 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
3693 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
3706 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
3713 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
3723 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
3724 tcg_gen_ext32s_tl(arg
, arg
);
3735 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
3745 (void)rn
; /* avoid a compiler warning */
3746 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
3750 LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
3751 generate_exception(ctx
, EXCP_RI
);
3754 static void gen_mtc0 (CPUMIPSState
*env
, DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
3756 const char *rn
= "invalid";
3759 check_insn(env
, ctx
, ISA_MIPS32
);
3768 gen_helper_mtc0_index(arg
);
3772 check_insn(env
, ctx
, ASE_MT
);
3773 gen_helper_mtc0_mvpcontrol(arg
);
3777 check_insn(env
, ctx
, ASE_MT
);
3782 check_insn(env
, ctx
, ASE_MT
);
3797 check_insn(env
, ctx
, ASE_MT
);
3798 gen_helper_mtc0_vpecontrol(arg
);
3802 check_insn(env
, ctx
, ASE_MT
);
3803 gen_helper_mtc0_vpeconf0(arg
);
3807 check_insn(env
, ctx
, ASE_MT
);
3808 gen_helper_mtc0_vpeconf1(arg
);
3812 check_insn(env
, ctx
, ASE_MT
);
3813 gen_helper_mtc0_yqmask(arg
);
3817 check_insn(env
, ctx
, ASE_MT
);
3818 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
3822 check_insn(env
, ctx
, ASE_MT
);
3823 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
3824 rn
= "VPEScheFBack";
3827 check_insn(env
, ctx
, ASE_MT
);
3828 gen_helper_mtc0_vpeopt(arg
);
3838 gen_helper_mtc0_entrylo0(arg
);
3842 check_insn(env
, ctx
, ASE_MT
);
3843 gen_helper_mtc0_tcstatus(arg
);
3847 check_insn(env
, ctx
, ASE_MT
);
3848 gen_helper_mtc0_tcbind(arg
);
3852 check_insn(env
, ctx
, ASE_MT
);
3853 gen_helper_mtc0_tcrestart(arg
);
3857 check_insn(env
, ctx
, ASE_MT
);
3858 gen_helper_mtc0_tchalt(arg
);
3862 check_insn(env
, ctx
, ASE_MT
);
3863 gen_helper_mtc0_tccontext(arg
);
3867 check_insn(env
, ctx
, ASE_MT
);
3868 gen_helper_mtc0_tcschedule(arg
);
3872 check_insn(env
, ctx
, ASE_MT
);
3873 gen_helper_mtc0_tcschefback(arg
);
3883 gen_helper_mtc0_entrylo1(arg
);
3893 gen_helper_mtc0_context(arg
);
3897 // gen_helper_mtc0_contextconfig(arg); /* SmartMIPS ASE */
3898 rn
= "ContextConfig";
3907 gen_helper_mtc0_pagemask(arg
);
3911 check_insn(env
, ctx
, ISA_MIPS32R2
);
3912 gen_helper_mtc0_pagegrain(arg
);
3922 gen_helper_mtc0_wired(arg
);
3926 check_insn(env
, ctx
, ISA_MIPS32R2
);
3927 gen_helper_mtc0_srsconf0(arg
);
3931 check_insn(env
, ctx
, ISA_MIPS32R2
);
3932 gen_helper_mtc0_srsconf1(arg
);
3936 check_insn(env
, ctx
, ISA_MIPS32R2
);
3937 gen_helper_mtc0_srsconf2(arg
);
3941 check_insn(env
, ctx
, ISA_MIPS32R2
);
3942 gen_helper_mtc0_srsconf3(arg
);
3946 check_insn(env
, ctx
, ISA_MIPS32R2
);
3947 gen_helper_mtc0_srsconf4(arg
);
3957 check_insn(env
, ctx
, ISA_MIPS32R2
);
3958 gen_helper_mtc0_hwrena(arg
);
3972 gen_helper_mtc0_count(arg
);
3975 /* 6,7 are implementation dependent */
3983 gen_helper_mtc0_entryhi(arg
);
3993 gen_helper_mtc0_compare(arg
);
3996 /* 6,7 are implementation dependent */
4004 save_cpu_state(ctx
, 1);
4005 gen_helper_mtc0_status(arg
);
4006 /* BS_STOP isn't good enough here, hflags may have changed. */
4007 gen_save_pc(ctx
->pc
+ 4);
4008 ctx
->bstate
= BS_EXCP
;
4012 check_insn(env
, ctx
, ISA_MIPS32R2
);
4013 gen_helper_mtc0_intctl(arg
);
4014 /* Stop translation as we may have switched the execution mode */
4015 ctx
->bstate
= BS_STOP
;
4019 check_insn(env
, ctx
, ISA_MIPS32R2
);
4020 gen_helper_mtc0_srsctl(arg
);
4021 /* Stop translation as we may have switched the execution mode */
4022 ctx
->bstate
= BS_STOP
;
4026 check_insn(env
, ctx
, ISA_MIPS32R2
);
4027 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
4028 /* Stop translation as we may have switched the execution mode */
4029 ctx
->bstate
= BS_STOP
;
4039 save_cpu_state(ctx
, 1);
4040 gen_helper_mtc0_cause(arg
);
4050 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_EPC
));
4064 check_insn(env
, ctx
, ISA_MIPS32R2
);
4065 gen_helper_mtc0_ebase(arg
);
4075 gen_helper_mtc0_config0(arg
);
4077 /* Stop translation as we may have switched the execution mode */
4078 ctx
->bstate
= BS_STOP
;
4081 /* ignored, read only */
4085 gen_helper_mtc0_config2(arg
);
4087 /* Stop translation as we may have switched the execution mode */
4088 ctx
->bstate
= BS_STOP
;
4091 /* ignored, read only */
4094 /* 4,5 are reserved */
4095 /* 6,7 are implementation dependent */
4105 rn
= "Invalid config selector";
4112 gen_helper_mtc0_lladdr(arg
);
4122 gen_helper_1i(mtc0_watchlo
, arg
, sel
);
4132 gen_helper_1i(mtc0_watchhi
, arg
, sel
);
4142 #if defined(TARGET_MIPS64)
4143 check_insn(env
, ctx
, ISA_MIPS3
);
4144 gen_helper_mtc0_xcontext(arg
);
4153 /* Officially reserved, but sel 0 is used for R1x000 framemask */
4156 gen_helper_mtc0_framemask(arg
);
4165 rn
= "Diagnostic"; /* implementation dependent */
4170 gen_helper_mtc0_debug(arg
); /* EJTAG support */
4171 /* BS_STOP isn't good enough here, hflags may have changed. */
4172 gen_save_pc(ctx
->pc
+ 4);
4173 ctx
->bstate
= BS_EXCP
;
4177 // gen_helper_mtc0_tracecontrol(arg); /* PDtrace support */
4178 rn
= "TraceControl";
4179 /* Stop translation as we may have switched the execution mode */
4180 ctx
->bstate
= BS_STOP
;
4183 // gen_helper_mtc0_tracecontrol2(arg); /* PDtrace support */
4184 rn
= "TraceControl2";
4185 /* Stop translation as we may have switched the execution mode */
4186 ctx
->bstate
= BS_STOP
;
4189 /* Stop translation as we may have switched the execution mode */
4190 ctx
->bstate
= BS_STOP
;
4191 // gen_helper_mtc0_usertracedata(arg); /* PDtrace support */
4192 rn
= "UserTraceData";
4193 /* Stop translation as we may have switched the execution mode */
4194 ctx
->bstate
= BS_STOP
;
4197 // gen_helper_mtc0_tracebpc(arg); /* PDtrace support */
4198 /* Stop translation as we may have switched the execution mode */
4199 ctx
->bstate
= BS_STOP
;
4210 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_DEPC
));
4220 gen_helper_mtc0_performance0(arg
);
4221 rn
= "Performance0";
4224 // gen_helper_mtc0_performance1(arg);
4225 rn
= "Performance1";
4228 // gen_helper_mtc0_performance2(arg);
4229 rn
= "Performance2";
4232 // gen_helper_mtc0_performance3(arg);
4233 rn
= "Performance3";
4236 // gen_helper_mtc0_performance4(arg);
4237 rn
= "Performance4";
4240 // gen_helper_mtc0_performance5(arg);
4241 rn
= "Performance5";
4244 // gen_helper_mtc0_performance6(arg);
4245 rn
= "Performance6";
4248 // gen_helper_mtc0_performance7(arg);
4249 rn
= "Performance7";
4275 gen_helper_mtc0_taglo(arg
);
4282 gen_helper_mtc0_datalo(arg
);
4295 gen_helper_mtc0_taghi(arg
);
4302 gen_helper_mtc0_datahi(arg
);
4313 gen_mtc0_store64(arg
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
4324 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
4330 /* Stop translation as we may have switched the execution mode */
4331 ctx
->bstate
= BS_STOP
;
4336 (void)rn
; /* avoid a compiler warning */
4337 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4338 /* For simplicity assume that all writes can cause interrupts. */
4341 ctx
->bstate
= BS_STOP
;
4346 LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4347 generate_exception(ctx
, EXCP_RI
);
4350 #if defined(TARGET_MIPS64)
4351 static void gen_dmfc0 (CPUMIPSState
*env
, DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4353 const char *rn
= "invalid";
4356 check_insn(env
, ctx
, ISA_MIPS64
);
4362 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
4366 check_insn(env
, ctx
, ASE_MT
);
4367 gen_helper_mfc0_mvpcontrol(arg
);
4371 check_insn(env
, ctx
, ASE_MT
);
4372 gen_helper_mfc0_mvpconf0(arg
);
4376 check_insn(env
, ctx
, ASE_MT
);
4377 gen_helper_mfc0_mvpconf1(arg
);
4387 gen_helper_mfc0_random(arg
);
4391 check_insn(env
, ctx
, ASE_MT
);
4392 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
4396 check_insn(env
, ctx
, ASE_MT
);
4397 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
4401 check_insn(env
, ctx
, ASE_MT
);
4402 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
4406 check_insn(env
, ctx
, ASE_MT
);
4407 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_YQMask
));
4411 check_insn(env
, ctx
, ASE_MT
);
4412 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
4416 check_insn(env
, ctx
, ASE_MT
);
4417 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
4418 rn
= "VPEScheFBack";
4421 check_insn(env
, ctx
, ASE_MT
);
4422 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
4432 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
4436 check_insn(env
, ctx
, ASE_MT
);
4437 gen_helper_mfc0_tcstatus(arg
);
4441 check_insn(env
, ctx
, ASE_MT
);
4442 gen_helper_mfc0_tcbind(arg
);
4446 check_insn(env
, ctx
, ASE_MT
);
4447 gen_helper_dmfc0_tcrestart(arg
);
4451 check_insn(env
, ctx
, ASE_MT
);
4452 gen_helper_dmfc0_tchalt(arg
);
4456 check_insn(env
, ctx
, ASE_MT
);
4457 gen_helper_dmfc0_tccontext(arg
);
4461 check_insn(env
, ctx
, ASE_MT
);
4462 gen_helper_dmfc0_tcschedule(arg
);
4466 check_insn(env
, ctx
, ASE_MT
);
4467 gen_helper_dmfc0_tcschefback(arg
);
4477 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
4487 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
4491 // gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
4492 rn
= "ContextConfig";
4501 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
4505 check_insn(env
, ctx
, ISA_MIPS32R2
);
4506 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
4516 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
4520 check_insn(env
, ctx
, ISA_MIPS32R2
);
4521 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
4525 check_insn(env
, ctx
, ISA_MIPS32R2
);
4526 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
4530 check_insn(env
, ctx
, ISA_MIPS32R2
);
4531 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
4535 check_insn(env
, ctx
, ISA_MIPS32R2
);
4536 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
4540 check_insn(env
, ctx
, ISA_MIPS32R2
);
4541 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
4551 check_insn(env
, ctx
, ISA_MIPS32R2
);
4552 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
4562 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
4572 /* Mark as an IO operation because we read the time. */
4575 gen_helper_mfc0_count(arg
);
4579 /* Break the TB to be able to take timer interrupts immediately
4580 after reading count. */
4581 ctx
->bstate
= BS_STOP
;
4584 /* 6,7 are implementation dependent */
4592 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
4602 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
4605 /* 6,7 are implementation dependent */
4613 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
4617 check_insn(env
, ctx
, ISA_MIPS32R2
);
4618 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
4622 check_insn(env
, ctx
, ISA_MIPS32R2
);
4623 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
4627 check_insn(env
, ctx
, ISA_MIPS32R2
);
4628 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
4638 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
4648 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
4658 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
4662 check_insn(env
, ctx
, ISA_MIPS32R2
);
4663 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_EBase
));
4673 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
4677 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
4681 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
4685 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
4688 /* 6,7 are implementation dependent */
4690 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
4694 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
4704 gen_helper_dmfc0_lladdr(arg
);
4714 gen_helper_1i(dmfc0_watchlo
, arg
, sel
);
4724 gen_helper_1i(mfc0_watchhi
, arg
, sel
);
4734 check_insn(env
, ctx
, ISA_MIPS3
);
4735 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
4743 /* Officially reserved, but sel 0 is used for R1x000 framemask */
4746 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
4754 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
4755 rn
= "'Diagnostic"; /* implementation dependent */
4760 gen_helper_mfc0_debug(arg
); /* EJTAG support */
4764 // gen_helper_dmfc0_tracecontrol(arg); /* PDtrace support */
4765 rn
= "TraceControl";
4768 // gen_helper_dmfc0_tracecontrol2(arg); /* PDtrace support */
4769 rn
= "TraceControl2";
4772 // gen_helper_dmfc0_usertracedata(arg); /* PDtrace support */
4773 rn
= "UserTraceData";
4776 // gen_helper_dmfc0_tracebpc(arg); /* PDtrace support */
4787 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
4797 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
4798 rn
= "Performance0";
4801 // gen_helper_dmfc0_performance1(arg);
4802 rn
= "Performance1";
4805 // gen_helper_dmfc0_performance2(arg);
4806 rn
= "Performance2";
4809 // gen_helper_dmfc0_performance3(arg);
4810 rn
= "Performance3";
4813 // gen_helper_dmfc0_performance4(arg);
4814 rn
= "Performance4";
4817 // gen_helper_dmfc0_performance5(arg);
4818 rn
= "Performance5";
4821 // gen_helper_dmfc0_performance6(arg);
4822 rn
= "Performance6";
4825 // gen_helper_dmfc0_performance7(arg);
4826 rn
= "Performance7";
4833 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
4840 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
4853 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
4860 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
4873 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
4880 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
4890 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
4901 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
4911 (void)rn
; /* avoid a compiler warning */
4912 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4916 LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
4917 generate_exception(ctx
, EXCP_RI
);
4920 static void gen_dmtc0 (CPUMIPSState
*env
, DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
4922 const char *rn
= "invalid";
4925 check_insn(env
, ctx
, ISA_MIPS64
);
4934 gen_helper_mtc0_index(arg
);
4938 check_insn(env
, ctx
, ASE_MT
);
4939 gen_helper_mtc0_mvpcontrol(arg
);
4943 check_insn(env
, ctx
, ASE_MT
);
4948 check_insn(env
, ctx
, ASE_MT
);
4963 check_insn(env
, ctx
, ASE_MT
);
4964 gen_helper_mtc0_vpecontrol(arg
);
4968 check_insn(env
, ctx
, ASE_MT
);
4969 gen_helper_mtc0_vpeconf0(arg
);
4973 check_insn(env
, ctx
, ASE_MT
);
4974 gen_helper_mtc0_vpeconf1(arg
);
4978 check_insn(env
, ctx
, ASE_MT
);
4979 gen_helper_mtc0_yqmask(arg
);
4983 check_insn(env
, ctx
, ASE_MT
);
4984 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
4988 check_insn(env
, ctx
, ASE_MT
);
4989 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
4990 rn
= "VPEScheFBack";
4993 check_insn(env
, ctx
, ASE_MT
);
4994 gen_helper_mtc0_vpeopt(arg
);
5004 gen_helper_mtc0_entrylo0(arg
);
5008 check_insn(env
, ctx
, ASE_MT
);
5009 gen_helper_mtc0_tcstatus(arg
);
5013 check_insn(env
, ctx
, ASE_MT
);
5014 gen_helper_mtc0_tcbind(arg
);
5018 check_insn(env
, ctx
, ASE_MT
);
5019 gen_helper_mtc0_tcrestart(arg
);
5023 check_insn(env
, ctx
, ASE_MT
);
5024 gen_helper_mtc0_tchalt(arg
);
5028 check_insn(env
, ctx
, ASE_MT
);
5029 gen_helper_mtc0_tccontext(arg
);
5033 check_insn(env
, ctx
, ASE_MT
);
5034 gen_helper_mtc0_tcschedule(arg
);
5038 check_insn(env
, ctx
, ASE_MT
);
5039 gen_helper_mtc0_tcschefback(arg
);
5049 gen_helper_mtc0_entrylo1(arg
);
5059 gen_helper_mtc0_context(arg
);
5063 // gen_helper_mtc0_contextconfig(arg); /* SmartMIPS ASE */
5064 rn
= "ContextConfig";
5073 gen_helper_mtc0_pagemask(arg
);
5077 check_insn(env
, ctx
, ISA_MIPS32R2
);
5078 gen_helper_mtc0_pagegrain(arg
);
5088 gen_helper_mtc0_wired(arg
);
5092 check_insn(env
, ctx
, ISA_MIPS32R2
);
5093 gen_helper_mtc0_srsconf0(arg
);
5097 check_insn(env
, ctx
, ISA_MIPS32R2
);
5098 gen_helper_mtc0_srsconf1(arg
);
5102 check_insn(env
, ctx
, ISA_MIPS32R2
);
5103 gen_helper_mtc0_srsconf2(arg
);
5107 check_insn(env
, ctx
, ISA_MIPS32R2
);
5108 gen_helper_mtc0_srsconf3(arg
);
5112 check_insn(env
, ctx
, ISA_MIPS32R2
);
5113 gen_helper_mtc0_srsconf4(arg
);
5123 check_insn(env
, ctx
, ISA_MIPS32R2
);
5124 gen_helper_mtc0_hwrena(arg
);
5138 gen_helper_mtc0_count(arg
);
5141 /* 6,7 are implementation dependent */
5145 /* Stop translation as we may have switched the execution mode */
5146 ctx
->bstate
= BS_STOP
;
5151 gen_helper_mtc0_entryhi(arg
);
5161 gen_helper_mtc0_compare(arg
);
5164 /* 6,7 are implementation dependent */
5168 /* Stop translation as we may have switched the execution mode */
5169 ctx
->bstate
= BS_STOP
;
5174 save_cpu_state(ctx
, 1);
5175 gen_helper_mtc0_status(arg
);
5176 /* BS_STOP isn't good enough here, hflags may have changed. */
5177 gen_save_pc(ctx
->pc
+ 4);
5178 ctx
->bstate
= BS_EXCP
;
5182 check_insn(env
, ctx
, ISA_MIPS32R2
);
5183 gen_helper_mtc0_intctl(arg
);
5184 /* Stop translation as we may have switched the execution mode */
5185 ctx
->bstate
= BS_STOP
;
5189 check_insn(env
, ctx
, ISA_MIPS32R2
);
5190 gen_helper_mtc0_srsctl(arg
);
5191 /* Stop translation as we may have switched the execution mode */
5192 ctx
->bstate
= BS_STOP
;
5196 check_insn(env
, ctx
, ISA_MIPS32R2
);
5197 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
5198 /* Stop translation as we may have switched the execution mode */
5199 ctx
->bstate
= BS_STOP
;
5209 save_cpu_state(ctx
, 1);
5210 /* Mark as an IO operation because we may trigger a software
5215 gen_helper_mtc0_cause(arg
);
5219 /* Stop translation as we may have triggered an intetrupt */
5220 ctx
->bstate
= BS_STOP
;
5230 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
5244 check_insn(env
, ctx
, ISA_MIPS32R2
);
5245 gen_helper_mtc0_ebase(arg
);
5255 gen_helper_mtc0_config0(arg
);
5257 /* Stop translation as we may have switched the execution mode */
5258 ctx
->bstate
= BS_STOP
;
5261 /* ignored, read only */
5265 gen_helper_mtc0_config2(arg
);
5267 /* Stop translation as we may have switched the execution mode */
5268 ctx
->bstate
= BS_STOP
;
5274 /* 6,7 are implementation dependent */
5276 rn
= "Invalid config selector";
5283 gen_helper_mtc0_lladdr(arg
);
5293 gen_helper_1i(mtc0_watchlo
, arg
, sel
);
5303 gen_helper_1i(mtc0_watchhi
, arg
, sel
);
5313 check_insn(env
, ctx
, ISA_MIPS3
);
5314 gen_helper_mtc0_xcontext(arg
);
5322 /* Officially reserved, but sel 0 is used for R1x000 framemask */
5325 gen_helper_mtc0_framemask(arg
);
5334 rn
= "Diagnostic"; /* implementation dependent */
5339 gen_helper_mtc0_debug(arg
); /* EJTAG support */
5340 /* BS_STOP isn't good enough here, hflags may have changed. */
5341 gen_save_pc(ctx
->pc
+ 4);
5342 ctx
->bstate
= BS_EXCP
;
5346 // gen_helper_mtc0_tracecontrol(arg); /* PDtrace support */
5347 /* Stop translation as we may have switched the execution mode */
5348 ctx
->bstate
= BS_STOP
;
5349 rn
= "TraceControl";
5352 // gen_helper_mtc0_tracecontrol2(arg); /* PDtrace support */
5353 /* Stop translation as we may have switched the execution mode */
5354 ctx
->bstate
= BS_STOP
;
5355 rn
= "TraceControl2";
5358 // gen_helper_mtc0_usertracedata(arg); /* PDtrace support */
5359 /* Stop translation as we may have switched the execution mode */
5360 ctx
->bstate
= BS_STOP
;
5361 rn
= "UserTraceData";
5364 // gen_helper_mtc0_tracebpc(arg); /* PDtrace support */
5365 /* Stop translation as we may have switched the execution mode */
5366 ctx
->bstate
= BS_STOP
;
5377 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
5387 gen_helper_mtc0_performance0(arg
);
5388 rn
= "Performance0";
5391 // gen_helper_mtc0_performance1(arg);
5392 rn
= "Performance1";
5395 // gen_helper_mtc0_performance2(arg);
5396 rn
= "Performance2";
5399 // gen_helper_mtc0_performance3(arg);
5400 rn
= "Performance3";
5403 // gen_helper_mtc0_performance4(arg);
5404 rn
= "Performance4";
5407 // gen_helper_mtc0_performance5(arg);
5408 rn
= "Performance5";
5411 // gen_helper_mtc0_performance6(arg);
5412 rn
= "Performance6";
5415 // gen_helper_mtc0_performance7(arg);
5416 rn
= "Performance7";
5442 gen_helper_mtc0_taglo(arg
);
5449 gen_helper_mtc0_datalo(arg
);
5462 gen_helper_mtc0_taghi(arg
);
5469 gen_helper_mtc0_datahi(arg
);
5480 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
5491 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
5497 /* Stop translation as we may have switched the execution mode */
5498 ctx
->bstate
= BS_STOP
;
5503 (void)rn
; /* avoid a compiler warning */
5504 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5505 /* For simplicity assume that all writes can cause interrupts. */
5508 ctx
->bstate
= BS_STOP
;
5513 LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn
, reg
, sel
);
5514 generate_exception(ctx
, EXCP_RI
);
5516 #endif /* TARGET_MIPS64 */
5518 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
5519 int u
, int sel
, int h
)
5521 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
5522 TCGv t0
= tcg_temp_local_new();
5524 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
5525 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
5526 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
5527 tcg_gen_movi_tl(t0
, -1);
5528 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
5529 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
5530 tcg_gen_movi_tl(t0
, -1);
5536 gen_helper_mftc0_vpecontrol(t0
);
5539 gen_helper_mftc0_vpeconf0(t0
);
5549 gen_helper_mftc0_tcstatus(t0
);
5552 gen_helper_mftc0_tcbind(t0
);
5555 gen_helper_mftc0_tcrestart(t0
);
5558 gen_helper_mftc0_tchalt(t0
);
5561 gen_helper_mftc0_tccontext(t0
);
5564 gen_helper_mftc0_tcschedule(t0
);
5567 gen_helper_mftc0_tcschefback(t0
);
5570 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
5577 gen_helper_mftc0_entryhi(t0
);
5580 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
5586 gen_helper_mftc0_status(t0
);
5589 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
5595 gen_helper_mftc0_cause(t0
);
5605 gen_helper_mftc0_epc(t0
);
5615 gen_helper_mftc0_ebase(t0
);
5625 gen_helper_mftc0_configx(t0
, tcg_const_tl(sel
));
5635 gen_helper_mftc0_debug(t0
);
5638 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
5643 gen_mfc0(env
, ctx
, t0
, rt
, sel
);
5645 } else switch (sel
) {
5646 /* GPR registers. */
5648 gen_helper_1i(mftgpr
, t0
, rt
);
5650 /* Auxiliary CPU registers */
5654 gen_helper_1i(mftlo
, t0
, 0);
5657 gen_helper_1i(mfthi
, t0
, 0);
5660 gen_helper_1i(mftacx
, t0
, 0);
5663 gen_helper_1i(mftlo
, t0
, 1);
5666 gen_helper_1i(mfthi
, t0
, 1);
5669 gen_helper_1i(mftacx
, t0
, 1);
5672 gen_helper_1i(mftlo
, t0
, 2);
5675 gen_helper_1i(mfthi
, t0
, 2);
5678 gen_helper_1i(mftacx
, t0
, 2);
5681 gen_helper_1i(mftlo
, t0
, 3);
5684 gen_helper_1i(mfthi
, t0
, 3);
5687 gen_helper_1i(mftacx
, t0
, 3);
5690 gen_helper_mftdsp(t0
);
5696 /* Floating point (COP1). */
5698 /* XXX: For now we support only a single FPU context. */
5700 TCGv_i32 fp0
= tcg_temp_new_i32();
5702 gen_load_fpr32(fp0
, rt
);
5703 tcg_gen_ext_i32_tl(t0
, fp0
);
5704 tcg_temp_free_i32(fp0
);
5706 TCGv_i32 fp0
= tcg_temp_new_i32();
5708 gen_load_fpr32h(fp0
, rt
);
5709 tcg_gen_ext_i32_tl(t0
, fp0
);
5710 tcg_temp_free_i32(fp0
);
5714 /* XXX: For now we support only a single FPU context. */
5715 gen_helper_1i(cfc1
, t0
, rt
);
5717 /* COP2: Not implemented. */
5724 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
5725 gen_store_gpr(t0
, rd
);
5731 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
5732 generate_exception(ctx
, EXCP_RI
);
5735 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
5736 int u
, int sel
, int h
)
5738 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
5739 TCGv t0
= tcg_temp_local_new();
5741 gen_load_gpr(t0
, rt
);
5742 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
5743 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
5744 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
5746 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
5747 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
5754 gen_helper_mttc0_vpecontrol(t0
);
5757 gen_helper_mttc0_vpeconf0(t0
);
5767 gen_helper_mttc0_tcstatus(t0
);
5770 gen_helper_mttc0_tcbind(t0
);
5773 gen_helper_mttc0_tcrestart(t0
);
5776 gen_helper_mttc0_tchalt(t0
);
5779 gen_helper_mttc0_tccontext(t0
);
5782 gen_helper_mttc0_tcschedule(t0
);
5785 gen_helper_mttc0_tcschefback(t0
);
5788 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
5795 gen_helper_mttc0_entryhi(t0
);
5798 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
5804 gen_helper_mttc0_status(t0
);
5807 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
5813 gen_helper_mttc0_cause(t0
);
5823 gen_helper_mttc0_ebase(t0
);
5833 gen_helper_mttc0_debug(t0
);
5836 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
5841 gen_mtc0(env
, ctx
, t0
, rd
, sel
);
5843 } else switch (sel
) {
5844 /* GPR registers. */
5846 gen_helper_1i(mttgpr
, t0
, rd
);
5848 /* Auxiliary CPU registers */
5852 gen_helper_1i(mttlo
, t0
, 0);
5855 gen_helper_1i(mtthi
, t0
, 0);
5858 gen_helper_1i(mttacx
, t0
, 0);
5861 gen_helper_1i(mttlo
, t0
, 1);
5864 gen_helper_1i(mtthi
, t0
, 1);
5867 gen_helper_1i(mttacx
, t0
, 1);
5870 gen_helper_1i(mttlo
, t0
, 2);
5873 gen_helper_1i(mtthi
, t0
, 2);
5876 gen_helper_1i(mttacx
, t0
, 2);
5879 gen_helper_1i(mttlo
, t0
, 3);
5882 gen_helper_1i(mtthi
, t0
, 3);
5885 gen_helper_1i(mttacx
, t0
, 3);
5888 gen_helper_mttdsp(t0
);
5894 /* Floating point (COP1). */
5896 /* XXX: For now we support only a single FPU context. */
5898 TCGv_i32 fp0
= tcg_temp_new_i32();
5900 tcg_gen_trunc_tl_i32(fp0
, t0
);
5901 gen_store_fpr32(fp0
, rd
);
5902 tcg_temp_free_i32(fp0
);
5904 TCGv_i32 fp0
= tcg_temp_new_i32();
5906 tcg_gen_trunc_tl_i32(fp0
, t0
);
5907 gen_store_fpr32h(fp0
, rd
);
5908 tcg_temp_free_i32(fp0
);
5912 /* XXX: For now we support only a single FPU context. */
5913 gen_helper_1i(ctc1
, t0
, rd
);
5915 /* COP2: Not implemented. */
5922 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
5928 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
5929 generate_exception(ctx
, EXCP_RI
);
static void gen_cp0 (CPUMIPSState *env, DisasContext *ctx, uint32_t opc, int rt, int rd)

    const char *opn = "ldst";

        gen_mfc0(env, ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);

        TCGv t0 = tcg_temp_new();

        gen_load_gpr(t0, rt);
        gen_mtc0(env, ctx, t0, rd, ctx->opcode & 0x7);

#if defined(TARGET_MIPS64)

        check_insn(env, ctx, ISA_MIPS3);

        gen_dmfc0(env, ctx, cpu_gpr[rt], rd, ctx->opcode & 0x7);

        check_insn(env, ctx, ISA_MIPS3);

        TCGv t0 = tcg_temp_new();

        gen_load_gpr(t0, rt);
        gen_dmtc0(env, ctx, t0, rd, ctx->opcode & 0x7);

        check_insn(env, ctx, ASE_MT);

        gen_mftr(env, ctx, rt, rd, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);

        check_insn(env, ctx, ASE_MT);
        gen_mttr(env, ctx, rd, rt, (ctx->opcode >> 5) & 1,
                 ctx->opcode & 0x7, (ctx->opcode >> 4) & 1);

        if (!env->tlb->helper_tlbwi)

        if (!env->tlb->helper_tlbwr)

        if (!env->tlb->helper_tlbp)

        if (!env->tlb->helper_tlbr)

        check_insn(env, ctx, ISA_MIPS2);

        ctx->bstate = BS_EXCP;

        check_insn(env, ctx, ISA_MIPS32);
        if (!(ctx->hflags & MIPS_HFLAG_DM)) {

            generate_exception(ctx, EXCP_RI);

        ctx->bstate = BS_EXCP;

        check_insn(env, ctx, ISA_MIPS3 | ISA_MIPS32);
        /* If we get an exception, we want to restart at next instruction */

        save_cpu_state(ctx, 1);

        ctx->bstate = BS_EXCP;

        generate_exception(ctx, EXCP_RI);

    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s %d", opn, regnames[rt], rd);

#endif /* !CONFIG_USER_ONLY */
6055 /* CP1 Branches (before delay slot) */
6056 static void gen_compute_branch1 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op
,
6057 int32_t cc
, int32_t offset
)
6059 target_ulong btarget
;
6060 const char *opn
= "cp1 cond branch";
6061 TCGv_i32 t0
= tcg_temp_new_i32();
6064 check_insn(env
, ctx
, ISA_MIPS4
| ISA_MIPS32
);
6066 btarget
= ctx
->pc
+ 4 + offset
;
6070 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6071 tcg_gen_not_i32(t0
, t0
);
6072 tcg_gen_andi_i32(t0
, t0
, 1);
6073 tcg_gen_extu_i32_tl(bcond
, t0
);
6077 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6078 tcg_gen_not_i32(t0
, t0
);
6079 tcg_gen_andi_i32(t0
, t0
, 1);
6080 tcg_gen_extu_i32_tl(bcond
, t0
);
6084 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6085 tcg_gen_andi_i32(t0
, t0
, 1);
6086 tcg_gen_extu_i32_tl(bcond
, t0
);
6090 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6091 tcg_gen_andi_i32(t0
, t0
, 1);
6092 tcg_gen_extu_i32_tl(bcond
, t0
);
6095 ctx
->hflags
|= MIPS_HFLAG_BL
;
6099 TCGv_i32 t1
= tcg_temp_new_i32();
6100 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6101 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
6102 tcg_gen_nor_i32(t0
, t0
, t1
);
6103 tcg_temp_free_i32(t1
);
6104 tcg_gen_andi_i32(t0
, t0
, 1);
6105 tcg_gen_extu_i32_tl(bcond
, t0
);
6111 TCGv_i32 t1
= tcg_temp_new_i32();
6112 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6113 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
6114 tcg_gen_or_i32(t0
, t0
, t1
);
6115 tcg_temp_free_i32(t1
);
6116 tcg_gen_andi_i32(t0
, t0
, 1);
6117 tcg_gen_extu_i32_tl(bcond
, t0
);
6123 TCGv_i32 t1
= tcg_temp_new_i32();
6124 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6125 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
6126 tcg_gen_or_i32(t0
, t0
, t1
);
6127 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
6128 tcg_gen_or_i32(t0
, t0
, t1
);
6129 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
6130 tcg_gen_nor_i32(t0
, t0
, t1
);
6131 tcg_temp_free_i32(t1
);
6132 tcg_gen_andi_i32(t0
, t0
, 1);
6133 tcg_gen_extu_i32_tl(bcond
, t0
);
6139 TCGv_i32 t1
= tcg_temp_new_i32();
6140 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
6141 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
6142 tcg_gen_or_i32(t0
, t0
, t1
);
6143 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
6144 tcg_gen_or_i32(t0
, t0
, t1
);
6145 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
6146 tcg_gen_or_i32(t0
, t0
, t1
);
6147 tcg_temp_free_i32(t1
);
6148 tcg_gen_andi_i32(t0
, t0
, 1);
6149 tcg_gen_extu_i32_tl(bcond
, t0
);
6153 ctx
->hflags
|= MIPS_HFLAG_BC
;
6157 generate_exception (ctx
, EXCP_RI
);
6160 (void)opn
; /* avoid a compiler warning */
6161 MIPS_DEBUG("%s: cond %02x target " TARGET_FMT_lx
, opn
,
6162 ctx
->hflags
, btarget
);
6163 ctx
->btarget
= btarget
;
6166 tcg_temp_free_i32(t0
);
/* Coprocessor 1 (FPU) */

#define FOP(func, fmt) (((fmt) << 21) | (func))
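
/*
 * Added commentary: FOP(func, fmt) packs the COP1 fmt field (bits 25..21)
 * and the function field (bits 5..0) into a single value that the decoder
 * can compare against the masked opcode.  For example, FOP(0, FMT_S) below
 * is ADD.S and FOP(2, FMT_D) is MUL.D.
 */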
    OPC_ADD_S = FOP(0, FMT_S),
    OPC_SUB_S = FOP(1, FMT_S),
    OPC_MUL_S = FOP(2, FMT_S),
    OPC_DIV_S = FOP(3, FMT_S),
    OPC_SQRT_S = FOP(4, FMT_S),
    OPC_ABS_S = FOP(5, FMT_S),
    OPC_MOV_S = FOP(6, FMT_S),
    OPC_NEG_S = FOP(7, FMT_S),
    OPC_ROUND_L_S = FOP(8, FMT_S),
    OPC_TRUNC_L_S = FOP(9, FMT_S),
    OPC_CEIL_L_S = FOP(10, FMT_S),
    OPC_FLOOR_L_S = FOP(11, FMT_S),
    OPC_ROUND_W_S = FOP(12, FMT_S),
    OPC_TRUNC_W_S = FOP(13, FMT_S),
    OPC_CEIL_W_S = FOP(14, FMT_S),
    OPC_FLOOR_W_S = FOP(15, FMT_S),
    OPC_MOVCF_S = FOP(17, FMT_S),
    OPC_MOVZ_S = FOP(18, FMT_S),
    OPC_MOVN_S = FOP(19, FMT_S),
    OPC_RECIP_S = FOP(21, FMT_S),
    OPC_RSQRT_S = FOP(22, FMT_S),
    OPC_RECIP2_S = FOP(28, FMT_S),
    OPC_RECIP1_S = FOP(29, FMT_S),
    OPC_RSQRT1_S = FOP(30, FMT_S),
    OPC_RSQRT2_S = FOP(31, FMT_S),
    OPC_CVT_D_S = FOP(33, FMT_S),
    OPC_CVT_W_S = FOP(36, FMT_S),
    OPC_CVT_L_S = FOP(37, FMT_S),
    OPC_CVT_PS_S = FOP(38, FMT_S),
    OPC_CMP_F_S = FOP (48, FMT_S),
    OPC_CMP_UN_S = FOP (49, FMT_S),
    OPC_CMP_EQ_S = FOP (50, FMT_S),
    OPC_CMP_UEQ_S = FOP (51, FMT_S),
    OPC_CMP_OLT_S = FOP (52, FMT_S),
    OPC_CMP_ULT_S = FOP (53, FMT_S),
    OPC_CMP_OLE_S = FOP (54, FMT_S),
    OPC_CMP_ULE_S = FOP (55, FMT_S),
    OPC_CMP_SF_S = FOP (56, FMT_S),
    OPC_CMP_NGLE_S = FOP (57, FMT_S),
    OPC_CMP_SEQ_S = FOP (58, FMT_S),
    OPC_CMP_NGL_S = FOP (59, FMT_S),
    OPC_CMP_LT_S = FOP (60, FMT_S),
    OPC_CMP_NGE_S = FOP (61, FMT_S),
    OPC_CMP_LE_S = FOP (62, FMT_S),
    OPC_CMP_NGT_S = FOP (63, FMT_S),

    OPC_ADD_D = FOP(0, FMT_D),
    OPC_SUB_D = FOP(1, FMT_D),
    OPC_MUL_D = FOP(2, FMT_D),
    OPC_DIV_D = FOP(3, FMT_D),
    OPC_SQRT_D = FOP(4, FMT_D),
    OPC_ABS_D = FOP(5, FMT_D),
    OPC_MOV_D = FOP(6, FMT_D),
    OPC_NEG_D = FOP(7, FMT_D),
    OPC_ROUND_L_D = FOP(8, FMT_D),
    OPC_TRUNC_L_D = FOP(9, FMT_D),
    OPC_CEIL_L_D = FOP(10, FMT_D),
    OPC_FLOOR_L_D = FOP(11, FMT_D),
    OPC_ROUND_W_D = FOP(12, FMT_D),
    OPC_TRUNC_W_D = FOP(13, FMT_D),
    OPC_CEIL_W_D = FOP(14, FMT_D),
    OPC_FLOOR_W_D = FOP(15, FMT_D),
    OPC_MOVCF_D = FOP(17, FMT_D),
    OPC_MOVZ_D = FOP(18, FMT_D),
    OPC_MOVN_D = FOP(19, FMT_D),
    OPC_RECIP_D = FOP(21, FMT_D),
    OPC_RSQRT_D = FOP(22, FMT_D),
    OPC_RECIP2_D = FOP(28, FMT_D),
    OPC_RECIP1_D = FOP(29, FMT_D),
    OPC_RSQRT1_D = FOP(30, FMT_D),
    OPC_RSQRT2_D = FOP(31, FMT_D),
    OPC_CVT_S_D = FOP(32, FMT_D),
    OPC_CVT_W_D = FOP(36, FMT_D),
    OPC_CVT_L_D = FOP(37, FMT_D),
    OPC_CMP_F_D = FOP (48, FMT_D),
    OPC_CMP_UN_D = FOP (49, FMT_D),
    OPC_CMP_EQ_D = FOP (50, FMT_D),
    OPC_CMP_UEQ_D = FOP (51, FMT_D),
    OPC_CMP_OLT_D = FOP (52, FMT_D),
    OPC_CMP_ULT_D = FOP (53, FMT_D),
    OPC_CMP_OLE_D = FOP (54, FMT_D),
    OPC_CMP_ULE_D = FOP (55, FMT_D),
    OPC_CMP_SF_D = FOP (56, FMT_D),
    OPC_CMP_NGLE_D = FOP (57, FMT_D),
    OPC_CMP_SEQ_D = FOP (58, FMT_D),
    OPC_CMP_NGL_D = FOP (59, FMT_D),
    OPC_CMP_LT_D = FOP (60, FMT_D),
    OPC_CMP_NGE_D = FOP (61, FMT_D),
    OPC_CMP_LE_D = FOP (62, FMT_D),
    OPC_CMP_NGT_D = FOP (63, FMT_D),

    OPC_CVT_S_W = FOP(32, FMT_W),
    OPC_CVT_D_W = FOP(33, FMT_W),
    OPC_CVT_S_L = FOP(32, FMT_L),
    OPC_CVT_D_L = FOP(33, FMT_L),
    OPC_CVT_PS_PW = FOP(38, FMT_W),

    OPC_ADD_PS = FOP(0, FMT_PS),
    OPC_SUB_PS = FOP(1, FMT_PS),
    OPC_MUL_PS = FOP(2, FMT_PS),
    OPC_DIV_PS = FOP(3, FMT_PS),
    OPC_ABS_PS = FOP(5, FMT_PS),
    OPC_MOV_PS = FOP(6, FMT_PS),
    OPC_NEG_PS = FOP(7, FMT_PS),
    OPC_MOVCF_PS = FOP(17, FMT_PS),
    OPC_MOVZ_PS = FOP(18, FMT_PS),
    OPC_MOVN_PS = FOP(19, FMT_PS),
    OPC_ADDR_PS = FOP(24, FMT_PS),
    OPC_MULR_PS = FOP(26, FMT_PS),
    OPC_RECIP2_PS = FOP(28, FMT_PS),
    OPC_RECIP1_PS = FOP(29, FMT_PS),
    OPC_RSQRT1_PS = FOP(30, FMT_PS),
    OPC_RSQRT2_PS = FOP(31, FMT_PS),
    OPC_CVT_S_PU = FOP(32, FMT_PS),
    OPC_CVT_PW_PS = FOP(36, FMT_PS),
    OPC_CVT_S_PL = FOP(40, FMT_PS),
    OPC_PLL_PS = FOP(44, FMT_PS),
    OPC_PLU_PS = FOP(45, FMT_PS),
    OPC_PUL_PS = FOP(46, FMT_PS),
    OPC_PUU_PS = FOP(47, FMT_PS),
    OPC_CMP_F_PS = FOP (48, FMT_PS),
    OPC_CMP_UN_PS = FOP (49, FMT_PS),
    OPC_CMP_EQ_PS = FOP (50, FMT_PS),
    OPC_CMP_UEQ_PS = FOP (51, FMT_PS),
    OPC_CMP_OLT_PS = FOP (52, FMT_PS),
    OPC_CMP_ULT_PS = FOP (53, FMT_PS),
    OPC_CMP_OLE_PS = FOP (54, FMT_PS),
    OPC_CMP_ULE_PS = FOP (55, FMT_PS),
    OPC_CMP_SF_PS = FOP (56, FMT_PS),
    OPC_CMP_NGLE_PS = FOP (57, FMT_PS),
    OPC_CMP_SEQ_PS = FOP (58, FMT_PS),
    OPC_CMP_NGL_PS = FOP (59, FMT_PS),
    OPC_CMP_LT_PS = FOP (60, FMT_PS),
    OPC_CMP_NGE_PS = FOP (61, FMT_PS),
    OPC_CMP_LE_PS = FOP (62, FMT_PS),
    OPC_CMP_NGT_PS = FOP (63, FMT_PS),
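/* For the OPC_CMP_* entries the function field is 0b11cccc: bit 0 of
   cccc means "unordered", bit 1 "equal", bit 2 "less than" and bit 3
   selects the signalling variants, which is why for example
   UEQ (51) == EQ (50) | UN (49) in the encodings above. */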
static void gen_cp1 (DisasContext *ctx, uint32_t opc, int rt, int fs)
    const char *opn = "cp1 move";
    TCGv t0 = tcg_temp_new();
        TCGv_i32 fp0 = tcg_temp_new_i32();
        gen_load_fpr32(fp0, fs);
        tcg_gen_ext_i32_tl(t0, fp0);
        tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(fp0, t0);
        gen_store_fpr32(fp0, fs);
        tcg_temp_free_i32(fp0);
        gen_helper_1i(cfc1, t0, fs);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        gen_helper_1i(ctc1, t0, fs);
#if defined(TARGET_MIPS64)
        gen_load_fpr64(ctx, t0, fs);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        gen_store_fpr64(ctx, t0, fs);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        gen_load_fpr32h(fp0, fs);
        tcg_gen_ext_i32_tl(t0, fp0);
        tcg_temp_free_i32(fp0);
        gen_store_gpr(t0, rt);
        gen_load_gpr(t0, rt);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        tcg_gen_trunc_tl_i32(fp0, t0);
        gen_store_fpr32h(fp0, fs);
        tcg_temp_free_i32(fp0);
        generate_exception (ctx, EXCP_RI);
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s %s", opn, regnames[rt], fregnames[fs]);
static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
    l1 = gen_new_label();
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    tcg_gen_movi_tl(cpu_gpr[rd], 0);
    tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
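/* Conditional moves on an FP condition code (the MOVF/MOVT family) are
   translated without helpers: the selected FCR31 bit is tested with a
   brcond that skips the register copy when the bit does not match tf. */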
static inline void gen_movcf_s (int fs, int fd, int cc, int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    int l1 = gen_new_label();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(t0, fs);
    gen_store_fpr32(t0, fd);
    tcg_temp_free_i32(t0);
static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    int l1 = gen_new_label();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
static inline void gen_movcf_ps (int fs, int fd, int cc, int tf)
    TCGv_i32 t0 = tcg_temp_new_i32();
    int l1 = gen_new_label();
    int l2 = gen_new_label();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(t0, fs);
    gen_store_fpr32(t0, fd);
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc+1));
    tcg_gen_brcondi_i32(cond, t0, 0, l2);
    gen_load_fpr32h(t0, fs);
    gen_store_fpr32h(t0, fd);
    tcg_temp_free_i32(t0);
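/* For the paired-single variant, condition bit cc guards the lower
   single (fpr32) and cc+1 guards the upper half (fpr32h), each with its
   own skip label, so the two halves can be moved independently. */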
static void gen_farith (DisasContext *ctx, enum fopcode op1,
                        int ft, int fs, int fd, int cc)
    const char *opn = "farith";
    const char *condnames[] = {
    const char *condnames_abs[] = {
    enum { BINOP, CMPOP, OTHEROP } optype = OTHEROP;
    uint32_t func = ctx->opcode & 0x3f;
6540 TCGv_i32 fp0
= tcg_temp_new_i32();
6541 TCGv_i32 fp1
= tcg_temp_new_i32();
6543 gen_load_fpr32(fp0
, fs
);
6544 gen_load_fpr32(fp1
, ft
);
6545 gen_helper_float_add_s(fp0
, fp0
, fp1
);
6546 tcg_temp_free_i32(fp1
);
6547 gen_store_fpr32(fp0
, fd
);
6548 tcg_temp_free_i32(fp0
);
6555 TCGv_i32 fp0
= tcg_temp_new_i32();
6556 TCGv_i32 fp1
= tcg_temp_new_i32();
6558 gen_load_fpr32(fp0
, fs
);
6559 gen_load_fpr32(fp1
, ft
);
6560 gen_helper_float_sub_s(fp0
, fp0
, fp1
);
6561 tcg_temp_free_i32(fp1
);
6562 gen_store_fpr32(fp0
, fd
);
6563 tcg_temp_free_i32(fp0
);
6570 TCGv_i32 fp0
= tcg_temp_new_i32();
6571 TCGv_i32 fp1
= tcg_temp_new_i32();
6573 gen_load_fpr32(fp0
, fs
);
6574 gen_load_fpr32(fp1
, ft
);
6575 gen_helper_float_mul_s(fp0
, fp0
, fp1
);
6576 tcg_temp_free_i32(fp1
);
6577 gen_store_fpr32(fp0
, fd
);
6578 tcg_temp_free_i32(fp0
);
6585 TCGv_i32 fp0
= tcg_temp_new_i32();
6586 TCGv_i32 fp1
= tcg_temp_new_i32();
6588 gen_load_fpr32(fp0
, fs
);
6589 gen_load_fpr32(fp1
, ft
);
6590 gen_helper_float_div_s(fp0
, fp0
, fp1
);
6591 tcg_temp_free_i32(fp1
);
6592 gen_store_fpr32(fp0
, fd
);
6593 tcg_temp_free_i32(fp0
);
6600 TCGv_i32 fp0
= tcg_temp_new_i32();
6602 gen_load_fpr32(fp0
, fs
);
6603 gen_helper_float_sqrt_s(fp0
, fp0
);
6604 gen_store_fpr32(fp0
, fd
);
6605 tcg_temp_free_i32(fp0
);
6611 TCGv_i32 fp0
= tcg_temp_new_i32();
6613 gen_load_fpr32(fp0
, fs
);
6614 gen_helper_float_abs_s(fp0
, fp0
);
6615 gen_store_fpr32(fp0
, fd
);
6616 tcg_temp_free_i32(fp0
);
6622 TCGv_i32 fp0
= tcg_temp_new_i32();
6624 gen_load_fpr32(fp0
, fs
);
6625 gen_store_fpr32(fp0
, fd
);
6626 tcg_temp_free_i32(fp0
);
6632 TCGv_i32 fp0
= tcg_temp_new_i32();
6634 gen_load_fpr32(fp0
, fs
);
6635 gen_helper_float_chs_s(fp0
, fp0
);
6636 gen_store_fpr32(fp0
, fd
);
6637 tcg_temp_free_i32(fp0
);
6642 check_cp1_64bitmode(ctx
);
6644 TCGv_i32 fp32
= tcg_temp_new_i32();
6645 TCGv_i64 fp64
= tcg_temp_new_i64();
6647 gen_load_fpr32(fp32
, fs
);
6648 gen_helper_float_roundl_s(fp64
, fp32
);
6649 tcg_temp_free_i32(fp32
);
6650 gen_store_fpr64(ctx
, fp64
, fd
);
6651 tcg_temp_free_i64(fp64
);
6656 check_cp1_64bitmode(ctx
);
6658 TCGv_i32 fp32
= tcg_temp_new_i32();
6659 TCGv_i64 fp64
= tcg_temp_new_i64();
6661 gen_load_fpr32(fp32
, fs
);
6662 gen_helper_float_truncl_s(fp64
, fp32
);
6663 tcg_temp_free_i32(fp32
);
6664 gen_store_fpr64(ctx
, fp64
, fd
);
6665 tcg_temp_free_i64(fp64
);
6670 check_cp1_64bitmode(ctx
);
6672 TCGv_i32 fp32
= tcg_temp_new_i32();
6673 TCGv_i64 fp64
= tcg_temp_new_i64();
6675 gen_load_fpr32(fp32
, fs
);
6676 gen_helper_float_ceill_s(fp64
, fp32
);
6677 tcg_temp_free_i32(fp32
);
6678 gen_store_fpr64(ctx
, fp64
, fd
);
6679 tcg_temp_free_i64(fp64
);
6684 check_cp1_64bitmode(ctx
);
6686 TCGv_i32 fp32
= tcg_temp_new_i32();
6687 TCGv_i64 fp64
= tcg_temp_new_i64();
6689 gen_load_fpr32(fp32
, fs
);
6690 gen_helper_float_floorl_s(fp64
, fp32
);
6691 tcg_temp_free_i32(fp32
);
6692 gen_store_fpr64(ctx
, fp64
, fd
);
6693 tcg_temp_free_i64(fp64
);
6699 TCGv_i32 fp0
= tcg_temp_new_i32();
6701 gen_load_fpr32(fp0
, fs
);
6702 gen_helper_float_roundw_s(fp0
, fp0
);
6703 gen_store_fpr32(fp0
, fd
);
6704 tcg_temp_free_i32(fp0
);
6710 TCGv_i32 fp0
= tcg_temp_new_i32();
6712 gen_load_fpr32(fp0
, fs
);
6713 gen_helper_float_truncw_s(fp0
, fp0
);
6714 gen_store_fpr32(fp0
, fd
);
6715 tcg_temp_free_i32(fp0
);
6721 TCGv_i32 fp0
= tcg_temp_new_i32();
6723 gen_load_fpr32(fp0
, fs
);
6724 gen_helper_float_ceilw_s(fp0
, fp0
);
6725 gen_store_fpr32(fp0
, fd
);
6726 tcg_temp_free_i32(fp0
);
6732 TCGv_i32 fp0
= tcg_temp_new_i32();
6734 gen_load_fpr32(fp0
, fs
);
6735 gen_helper_float_floorw_s(fp0
, fp0
);
6736 gen_store_fpr32(fp0
, fd
);
6737 tcg_temp_free_i32(fp0
);
6742 gen_movcf_s(fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
6747 int l1
= gen_new_label();
6751 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
6753 fp0
= tcg_temp_new_i32();
6754 gen_load_fpr32(fp0
, fs
);
6755 gen_store_fpr32(fp0
, fd
);
6756 tcg_temp_free_i32(fp0
);
6763 int l1
= gen_new_label();
6767 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
6768 fp0
= tcg_temp_new_i32();
6769 gen_load_fpr32(fp0
, fs
);
6770 gen_store_fpr32(fp0
, fd
);
6771 tcg_temp_free_i32(fp0
);
6780 TCGv_i32 fp0
= tcg_temp_new_i32();
6782 gen_load_fpr32(fp0
, fs
);
6783 gen_helper_float_recip_s(fp0
, fp0
);
6784 gen_store_fpr32(fp0
, fd
);
6785 tcg_temp_free_i32(fp0
);
6792 TCGv_i32 fp0
= tcg_temp_new_i32();
6794 gen_load_fpr32(fp0
, fs
);
6795 gen_helper_float_rsqrt_s(fp0
, fp0
);
6796 gen_store_fpr32(fp0
, fd
);
6797 tcg_temp_free_i32(fp0
);
6802 check_cp1_64bitmode(ctx
);
6804 TCGv_i32 fp0
= tcg_temp_new_i32();
6805 TCGv_i32 fp1
= tcg_temp_new_i32();
6807 gen_load_fpr32(fp0
, fs
);
6808 gen_load_fpr32(fp1
, fd
);
6809 gen_helper_float_recip2_s(fp0
, fp0
, fp1
);
6810 tcg_temp_free_i32(fp1
);
6811 gen_store_fpr32(fp0
, fd
);
6812 tcg_temp_free_i32(fp0
);
6817 check_cp1_64bitmode(ctx
);
6819 TCGv_i32 fp0
= tcg_temp_new_i32();
6821 gen_load_fpr32(fp0
, fs
);
6822 gen_helper_float_recip1_s(fp0
, fp0
);
6823 gen_store_fpr32(fp0
, fd
);
6824 tcg_temp_free_i32(fp0
);
6829 check_cp1_64bitmode(ctx
);
6831 TCGv_i32 fp0
= tcg_temp_new_i32();
6833 gen_load_fpr32(fp0
, fs
);
6834 gen_helper_float_rsqrt1_s(fp0
, fp0
);
6835 gen_store_fpr32(fp0
, fd
);
6836 tcg_temp_free_i32(fp0
);
6841 check_cp1_64bitmode(ctx
);
6843 TCGv_i32 fp0
= tcg_temp_new_i32();
6844 TCGv_i32 fp1
= tcg_temp_new_i32();
6846 gen_load_fpr32(fp0
, fs
);
6847 gen_load_fpr32(fp1
, ft
);
6848 gen_helper_float_rsqrt2_s(fp0
, fp0
, fp1
);
6849 tcg_temp_free_i32(fp1
);
6850 gen_store_fpr32(fp0
, fd
);
6851 tcg_temp_free_i32(fp0
);
6856 check_cp1_registers(ctx
, fd
);
6858 TCGv_i32 fp32
= tcg_temp_new_i32();
6859 TCGv_i64 fp64
= tcg_temp_new_i64();
6861 gen_load_fpr32(fp32
, fs
);
6862 gen_helper_float_cvtd_s(fp64
, fp32
);
6863 tcg_temp_free_i32(fp32
);
6864 gen_store_fpr64(ctx
, fp64
, fd
);
6865 tcg_temp_free_i64(fp64
);
6871 TCGv_i32 fp0
= tcg_temp_new_i32();
6873 gen_load_fpr32(fp0
, fs
);
6874 gen_helper_float_cvtw_s(fp0
, fp0
);
6875 gen_store_fpr32(fp0
, fd
);
6876 tcg_temp_free_i32(fp0
);
6881 check_cp1_64bitmode(ctx
);
6883 TCGv_i32 fp32
= tcg_temp_new_i32();
6884 TCGv_i64 fp64
= tcg_temp_new_i64();
6886 gen_load_fpr32(fp32
, fs
);
6887 gen_helper_float_cvtl_s(fp64
, fp32
);
6888 tcg_temp_free_i32(fp32
);
6889 gen_store_fpr64(ctx
, fp64
, fd
);
6890 tcg_temp_free_i64(fp64
);
6895 check_cp1_64bitmode(ctx
);
6897 TCGv_i64 fp64
= tcg_temp_new_i64();
6898 TCGv_i32 fp32_0
= tcg_temp_new_i32();
6899 TCGv_i32 fp32_1
= tcg_temp_new_i32();
6901 gen_load_fpr32(fp32_0
, fs
);
6902 gen_load_fpr32(fp32_1
, ft
);
6903 tcg_gen_concat_i32_i64(fp64
, fp32_0
, fp32_1
);
6904 tcg_temp_free_i32(fp32_1
);
6905 tcg_temp_free_i32(fp32_0
);
6906 gen_store_fpr64(ctx
, fp64
, fd
);
6907 tcg_temp_free_i64(fp64
);
6920 case OPC_CMP_NGLE_S
:
6927 if (ctx
->opcode
& (1 << 6)) {
6928 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
6929 opn
= condnames_abs
[func
-48];
6931 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
6932 opn
= condnames
[func
-48];
6936 check_cp1_registers(ctx
, fs
| ft
| fd
);
6938 TCGv_i64 fp0
= tcg_temp_new_i64();
6939 TCGv_i64 fp1
= tcg_temp_new_i64();
6941 gen_load_fpr64(ctx
, fp0
, fs
);
6942 gen_load_fpr64(ctx
, fp1
, ft
);
6943 gen_helper_float_add_d(fp0
, fp0
, fp1
);
6944 tcg_temp_free_i64(fp1
);
6945 gen_store_fpr64(ctx
, fp0
, fd
);
6946 tcg_temp_free_i64(fp0
);
6952 check_cp1_registers(ctx
, fs
| ft
| fd
);
6954 TCGv_i64 fp0
= tcg_temp_new_i64();
6955 TCGv_i64 fp1
= tcg_temp_new_i64();
6957 gen_load_fpr64(ctx
, fp0
, fs
);
6958 gen_load_fpr64(ctx
, fp1
, ft
);
6959 gen_helper_float_sub_d(fp0
, fp0
, fp1
);
6960 tcg_temp_free_i64(fp1
);
6961 gen_store_fpr64(ctx
, fp0
, fd
);
6962 tcg_temp_free_i64(fp0
);
6968 check_cp1_registers(ctx
, fs
| ft
| fd
);
6970 TCGv_i64 fp0
= tcg_temp_new_i64();
6971 TCGv_i64 fp1
= tcg_temp_new_i64();
6973 gen_load_fpr64(ctx
, fp0
, fs
);
6974 gen_load_fpr64(ctx
, fp1
, ft
);
6975 gen_helper_float_mul_d(fp0
, fp0
, fp1
);
6976 tcg_temp_free_i64(fp1
);
6977 gen_store_fpr64(ctx
, fp0
, fd
);
6978 tcg_temp_free_i64(fp0
);
6984 check_cp1_registers(ctx
, fs
| ft
| fd
);
6986 TCGv_i64 fp0
= tcg_temp_new_i64();
6987 TCGv_i64 fp1
= tcg_temp_new_i64();
6989 gen_load_fpr64(ctx
, fp0
, fs
);
6990 gen_load_fpr64(ctx
, fp1
, ft
);
6991 gen_helper_float_div_d(fp0
, fp0
, fp1
);
6992 tcg_temp_free_i64(fp1
);
6993 gen_store_fpr64(ctx
, fp0
, fd
);
6994 tcg_temp_free_i64(fp0
);
7000 check_cp1_registers(ctx
, fs
| fd
);
7002 TCGv_i64 fp0
= tcg_temp_new_i64();
7004 gen_load_fpr64(ctx
, fp0
, fs
);
7005 gen_helper_float_sqrt_d(fp0
, fp0
);
7006 gen_store_fpr64(ctx
, fp0
, fd
);
7007 tcg_temp_free_i64(fp0
);
7012 check_cp1_registers(ctx
, fs
| fd
);
7014 TCGv_i64 fp0
= tcg_temp_new_i64();
7016 gen_load_fpr64(ctx
, fp0
, fs
);
7017 gen_helper_float_abs_d(fp0
, fp0
);
7018 gen_store_fpr64(ctx
, fp0
, fd
);
7019 tcg_temp_free_i64(fp0
);
7024 check_cp1_registers(ctx
, fs
| fd
);
7026 TCGv_i64 fp0
= tcg_temp_new_i64();
7028 gen_load_fpr64(ctx
, fp0
, fs
);
7029 gen_store_fpr64(ctx
, fp0
, fd
);
7030 tcg_temp_free_i64(fp0
);
7035 check_cp1_registers(ctx
, fs
| fd
);
7037 TCGv_i64 fp0
= tcg_temp_new_i64();
7039 gen_load_fpr64(ctx
, fp0
, fs
);
7040 gen_helper_float_chs_d(fp0
, fp0
);
7041 gen_store_fpr64(ctx
, fp0
, fd
);
7042 tcg_temp_free_i64(fp0
);
7047 check_cp1_64bitmode(ctx
);
7049 TCGv_i64 fp0
= tcg_temp_new_i64();
7051 gen_load_fpr64(ctx
, fp0
, fs
);
7052 gen_helper_float_roundl_d(fp0
, fp0
);
7053 gen_store_fpr64(ctx
, fp0
, fd
);
7054 tcg_temp_free_i64(fp0
);
7059 check_cp1_64bitmode(ctx
);
7061 TCGv_i64 fp0
= tcg_temp_new_i64();
7063 gen_load_fpr64(ctx
, fp0
, fs
);
7064 gen_helper_float_truncl_d(fp0
, fp0
);
7065 gen_store_fpr64(ctx
, fp0
, fd
);
7066 tcg_temp_free_i64(fp0
);
7071 check_cp1_64bitmode(ctx
);
7073 TCGv_i64 fp0
= tcg_temp_new_i64();
7075 gen_load_fpr64(ctx
, fp0
, fs
);
7076 gen_helper_float_ceill_d(fp0
, fp0
);
7077 gen_store_fpr64(ctx
, fp0
, fd
);
7078 tcg_temp_free_i64(fp0
);
7083 check_cp1_64bitmode(ctx
);
7085 TCGv_i64 fp0
= tcg_temp_new_i64();
7087 gen_load_fpr64(ctx
, fp0
, fs
);
7088 gen_helper_float_floorl_d(fp0
, fp0
);
7089 gen_store_fpr64(ctx
, fp0
, fd
);
7090 tcg_temp_free_i64(fp0
);
7095 check_cp1_registers(ctx
, fs
);
7097 TCGv_i32 fp32
= tcg_temp_new_i32();
7098 TCGv_i64 fp64
= tcg_temp_new_i64();
7100 gen_load_fpr64(ctx
, fp64
, fs
);
7101 gen_helper_float_roundw_d(fp32
, fp64
);
7102 tcg_temp_free_i64(fp64
);
7103 gen_store_fpr32(fp32
, fd
);
7104 tcg_temp_free_i32(fp32
);
7109 check_cp1_registers(ctx
, fs
);
7111 TCGv_i32 fp32
= tcg_temp_new_i32();
7112 TCGv_i64 fp64
= tcg_temp_new_i64();
7114 gen_load_fpr64(ctx
, fp64
, fs
);
7115 gen_helper_float_truncw_d(fp32
, fp64
);
7116 tcg_temp_free_i64(fp64
);
7117 gen_store_fpr32(fp32
, fd
);
7118 tcg_temp_free_i32(fp32
);
7123 check_cp1_registers(ctx
, fs
);
7125 TCGv_i32 fp32
= tcg_temp_new_i32();
7126 TCGv_i64 fp64
= tcg_temp_new_i64();
7128 gen_load_fpr64(ctx
, fp64
, fs
);
7129 gen_helper_float_ceilw_d(fp32
, fp64
);
7130 tcg_temp_free_i64(fp64
);
7131 gen_store_fpr32(fp32
, fd
);
7132 tcg_temp_free_i32(fp32
);
7137 check_cp1_registers(ctx
, fs
);
7139 TCGv_i32 fp32
= tcg_temp_new_i32();
7140 TCGv_i64 fp64
= tcg_temp_new_i64();
7142 gen_load_fpr64(ctx
, fp64
, fs
);
7143 gen_helper_float_floorw_d(fp32
, fp64
);
7144 tcg_temp_free_i64(fp64
);
7145 gen_store_fpr32(fp32
, fd
);
7146 tcg_temp_free_i32(fp32
);
7151 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
7156 int l1
= gen_new_label();
7160 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
7162 fp0
= tcg_temp_new_i64();
7163 gen_load_fpr64(ctx
, fp0
, fs
);
7164 gen_store_fpr64(ctx
, fp0
, fd
);
7165 tcg_temp_free_i64(fp0
);
7172 int l1
= gen_new_label();
7176 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
7177 fp0
= tcg_temp_new_i64();
7178 gen_load_fpr64(ctx
, fp0
, fs
);
7179 gen_store_fpr64(ctx
, fp0
, fd
);
7180 tcg_temp_free_i64(fp0
);
7187 check_cp1_64bitmode(ctx
);
7189 TCGv_i64 fp0
= tcg_temp_new_i64();
7191 gen_load_fpr64(ctx
, fp0
, fs
);
7192 gen_helper_float_recip_d(fp0
, fp0
);
7193 gen_store_fpr64(ctx
, fp0
, fd
);
7194 tcg_temp_free_i64(fp0
);
7199 check_cp1_64bitmode(ctx
);
7201 TCGv_i64 fp0
= tcg_temp_new_i64();
7203 gen_load_fpr64(ctx
, fp0
, fs
);
7204 gen_helper_float_rsqrt_d(fp0
, fp0
);
7205 gen_store_fpr64(ctx
, fp0
, fd
);
7206 tcg_temp_free_i64(fp0
);
7211 check_cp1_64bitmode(ctx
);
7213 TCGv_i64 fp0
= tcg_temp_new_i64();
7214 TCGv_i64 fp1
= tcg_temp_new_i64();
7216 gen_load_fpr64(ctx
, fp0
, fs
);
7217 gen_load_fpr64(ctx
, fp1
, ft
);
7218 gen_helper_float_recip2_d(fp0
, fp0
, fp1
);
7219 tcg_temp_free_i64(fp1
);
7220 gen_store_fpr64(ctx
, fp0
, fd
);
7221 tcg_temp_free_i64(fp0
);
7226 check_cp1_64bitmode(ctx
);
7228 TCGv_i64 fp0
= tcg_temp_new_i64();
7230 gen_load_fpr64(ctx
, fp0
, fs
);
7231 gen_helper_float_recip1_d(fp0
, fp0
);
7232 gen_store_fpr64(ctx
, fp0
, fd
);
7233 tcg_temp_free_i64(fp0
);
7238 check_cp1_64bitmode(ctx
);
7240 TCGv_i64 fp0
= tcg_temp_new_i64();
7242 gen_load_fpr64(ctx
, fp0
, fs
);
7243 gen_helper_float_rsqrt1_d(fp0
, fp0
);
7244 gen_store_fpr64(ctx
, fp0
, fd
);
7245 tcg_temp_free_i64(fp0
);
7250 check_cp1_64bitmode(ctx
);
7252 TCGv_i64 fp0
= tcg_temp_new_i64();
7253 TCGv_i64 fp1
= tcg_temp_new_i64();
7255 gen_load_fpr64(ctx
, fp0
, fs
);
7256 gen_load_fpr64(ctx
, fp1
, ft
);
7257 gen_helper_float_rsqrt2_d(fp0
, fp0
, fp1
);
7258 tcg_temp_free_i64(fp1
);
7259 gen_store_fpr64(ctx
, fp0
, fd
);
7260 tcg_temp_free_i64(fp0
);
7273 case OPC_CMP_NGLE_D
:
7280 if (ctx
->opcode
& (1 << 6)) {
7281 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
7282 opn
= condnames_abs
[func
-48];
7284 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
7285 opn
= condnames
[func
-48];
7289 check_cp1_registers(ctx
, fs
);
7291 TCGv_i32 fp32
= tcg_temp_new_i32();
7292 TCGv_i64 fp64
= tcg_temp_new_i64();
7294 gen_load_fpr64(ctx
, fp64
, fs
);
7295 gen_helper_float_cvts_d(fp32
, fp64
);
7296 tcg_temp_free_i64(fp64
);
7297 gen_store_fpr32(fp32
, fd
);
7298 tcg_temp_free_i32(fp32
);
7303 check_cp1_registers(ctx
, fs
);
7305 TCGv_i32 fp32
= tcg_temp_new_i32();
7306 TCGv_i64 fp64
= tcg_temp_new_i64();
7308 gen_load_fpr64(ctx
, fp64
, fs
);
7309 gen_helper_float_cvtw_d(fp32
, fp64
);
7310 tcg_temp_free_i64(fp64
);
7311 gen_store_fpr32(fp32
, fd
);
7312 tcg_temp_free_i32(fp32
);
7317 check_cp1_64bitmode(ctx
);
7319 TCGv_i64 fp0
= tcg_temp_new_i64();
7321 gen_load_fpr64(ctx
, fp0
, fs
);
7322 gen_helper_float_cvtl_d(fp0
, fp0
);
7323 gen_store_fpr64(ctx
, fp0
, fd
);
7324 tcg_temp_free_i64(fp0
);
7330 TCGv_i32 fp0
= tcg_temp_new_i32();
7332 gen_load_fpr32(fp0
, fs
);
7333 gen_helper_float_cvts_w(fp0
, fp0
);
7334 gen_store_fpr32(fp0
, fd
);
7335 tcg_temp_free_i32(fp0
);
7340 check_cp1_registers(ctx
, fd
);
7342 TCGv_i32 fp32
= tcg_temp_new_i32();
7343 TCGv_i64 fp64
= tcg_temp_new_i64();
7345 gen_load_fpr32(fp32
, fs
);
7346 gen_helper_float_cvtd_w(fp64
, fp32
);
7347 tcg_temp_free_i32(fp32
);
7348 gen_store_fpr64(ctx
, fp64
, fd
);
7349 tcg_temp_free_i64(fp64
);
7354 check_cp1_64bitmode(ctx
);
7356 TCGv_i32 fp32
= tcg_temp_new_i32();
7357 TCGv_i64 fp64
= tcg_temp_new_i64();
7359 gen_load_fpr64(ctx
, fp64
, fs
);
7360 gen_helper_float_cvts_l(fp32
, fp64
);
7361 tcg_temp_free_i64(fp64
);
7362 gen_store_fpr32(fp32
, fd
);
7363 tcg_temp_free_i32(fp32
);
7368 check_cp1_64bitmode(ctx
);
7370 TCGv_i64 fp0
= tcg_temp_new_i64();
7372 gen_load_fpr64(ctx
, fp0
, fs
);
7373 gen_helper_float_cvtd_l(fp0
, fp0
);
7374 gen_store_fpr64(ctx
, fp0
, fd
);
7375 tcg_temp_free_i64(fp0
);
7380 check_cp1_64bitmode(ctx
);
7382 TCGv_i64 fp0
= tcg_temp_new_i64();
7384 gen_load_fpr64(ctx
, fp0
, fs
);
7385 gen_helper_float_cvtps_pw(fp0
, fp0
);
7386 gen_store_fpr64(ctx
, fp0
, fd
);
7387 tcg_temp_free_i64(fp0
);
7392 check_cp1_64bitmode(ctx
);
7394 TCGv_i64 fp0
= tcg_temp_new_i64();
7395 TCGv_i64 fp1
= tcg_temp_new_i64();
7397 gen_load_fpr64(ctx
, fp0
, fs
);
7398 gen_load_fpr64(ctx
, fp1
, ft
);
7399 gen_helper_float_add_ps(fp0
, fp0
, fp1
);
7400 tcg_temp_free_i64(fp1
);
7401 gen_store_fpr64(ctx
, fp0
, fd
);
7402 tcg_temp_free_i64(fp0
);
7407 check_cp1_64bitmode(ctx
);
7409 TCGv_i64 fp0
= tcg_temp_new_i64();
7410 TCGv_i64 fp1
= tcg_temp_new_i64();
7412 gen_load_fpr64(ctx
, fp0
, fs
);
7413 gen_load_fpr64(ctx
, fp1
, ft
);
7414 gen_helper_float_sub_ps(fp0
, fp0
, fp1
);
7415 tcg_temp_free_i64(fp1
);
7416 gen_store_fpr64(ctx
, fp0
, fd
);
7417 tcg_temp_free_i64(fp0
);
7422 check_cp1_64bitmode(ctx
);
7424 TCGv_i64 fp0
= tcg_temp_new_i64();
7425 TCGv_i64 fp1
= tcg_temp_new_i64();
7427 gen_load_fpr64(ctx
, fp0
, fs
);
7428 gen_load_fpr64(ctx
, fp1
, ft
);
7429 gen_helper_float_mul_ps(fp0
, fp0
, fp1
);
7430 tcg_temp_free_i64(fp1
);
7431 gen_store_fpr64(ctx
, fp0
, fd
);
7432 tcg_temp_free_i64(fp0
);
7437 check_cp1_64bitmode(ctx
);
7439 TCGv_i64 fp0
= tcg_temp_new_i64();
7441 gen_load_fpr64(ctx
, fp0
, fs
);
7442 gen_helper_float_abs_ps(fp0
, fp0
);
7443 gen_store_fpr64(ctx
, fp0
, fd
);
7444 tcg_temp_free_i64(fp0
);
7449 check_cp1_64bitmode(ctx
);
7451 TCGv_i64 fp0
= tcg_temp_new_i64();
7453 gen_load_fpr64(ctx
, fp0
, fs
);
7454 gen_store_fpr64(ctx
, fp0
, fd
);
7455 tcg_temp_free_i64(fp0
);
7460 check_cp1_64bitmode(ctx
);
7462 TCGv_i64 fp0
= tcg_temp_new_i64();
7464 gen_load_fpr64(ctx
, fp0
, fs
);
7465 gen_helper_float_chs_ps(fp0
, fp0
);
7466 gen_store_fpr64(ctx
, fp0
, fd
);
7467 tcg_temp_free_i64(fp0
);
7472 check_cp1_64bitmode(ctx
);
7473 gen_movcf_ps(fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
7477 check_cp1_64bitmode(ctx
);
7479 int l1
= gen_new_label();
7483 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
7484 fp0
= tcg_temp_new_i64();
7485 gen_load_fpr64(ctx
, fp0
, fs
);
7486 gen_store_fpr64(ctx
, fp0
, fd
);
7487 tcg_temp_free_i64(fp0
);
7493 check_cp1_64bitmode(ctx
);
7495 int l1
= gen_new_label();
7499 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
7500 fp0
= tcg_temp_new_i64();
7501 gen_load_fpr64(ctx
, fp0
, fs
);
7502 gen_store_fpr64(ctx
, fp0
, fd
);
7503 tcg_temp_free_i64(fp0
);
7510 check_cp1_64bitmode(ctx
);
7512 TCGv_i64 fp0
= tcg_temp_new_i64();
7513 TCGv_i64 fp1
= tcg_temp_new_i64();
7515 gen_load_fpr64(ctx
, fp0
, ft
);
7516 gen_load_fpr64(ctx
, fp1
, fs
);
7517 gen_helper_float_addr_ps(fp0
, fp0
, fp1
);
7518 tcg_temp_free_i64(fp1
);
7519 gen_store_fpr64(ctx
, fp0
, fd
);
7520 tcg_temp_free_i64(fp0
);
7525 check_cp1_64bitmode(ctx
);
7527 TCGv_i64 fp0
= tcg_temp_new_i64();
7528 TCGv_i64 fp1
= tcg_temp_new_i64();
7530 gen_load_fpr64(ctx
, fp0
, ft
);
7531 gen_load_fpr64(ctx
, fp1
, fs
);
7532 gen_helper_float_mulr_ps(fp0
, fp0
, fp1
);
7533 tcg_temp_free_i64(fp1
);
7534 gen_store_fpr64(ctx
, fp0
, fd
);
7535 tcg_temp_free_i64(fp0
);
7540 check_cp1_64bitmode(ctx
);
7542 TCGv_i64 fp0
= tcg_temp_new_i64();
7543 TCGv_i64 fp1
= tcg_temp_new_i64();
7545 gen_load_fpr64(ctx
, fp0
, fs
);
7546 gen_load_fpr64(ctx
, fp1
, fd
);
7547 gen_helper_float_recip2_ps(fp0
, fp0
, fp1
);
7548 tcg_temp_free_i64(fp1
);
7549 gen_store_fpr64(ctx
, fp0
, fd
);
7550 tcg_temp_free_i64(fp0
);
7555 check_cp1_64bitmode(ctx
);
7557 TCGv_i64 fp0
= tcg_temp_new_i64();
7559 gen_load_fpr64(ctx
, fp0
, fs
);
7560 gen_helper_float_recip1_ps(fp0
, fp0
);
7561 gen_store_fpr64(ctx
, fp0
, fd
);
7562 tcg_temp_free_i64(fp0
);
7567 check_cp1_64bitmode(ctx
);
7569 TCGv_i64 fp0
= tcg_temp_new_i64();
7571 gen_load_fpr64(ctx
, fp0
, fs
);
7572 gen_helper_float_rsqrt1_ps(fp0
, fp0
);
7573 gen_store_fpr64(ctx
, fp0
, fd
);
7574 tcg_temp_free_i64(fp0
);
7579 check_cp1_64bitmode(ctx
);
7581 TCGv_i64 fp0
= tcg_temp_new_i64();
7582 TCGv_i64 fp1
= tcg_temp_new_i64();
7584 gen_load_fpr64(ctx
, fp0
, fs
);
7585 gen_load_fpr64(ctx
, fp1
, ft
);
7586 gen_helper_float_rsqrt2_ps(fp0
, fp0
, fp1
);
7587 tcg_temp_free_i64(fp1
);
7588 gen_store_fpr64(ctx
, fp0
, fd
);
7589 tcg_temp_free_i64(fp0
);
7594 check_cp1_64bitmode(ctx
);
7596 TCGv_i32 fp0
= tcg_temp_new_i32();
7598 gen_load_fpr32h(fp0
, fs
);
7599 gen_helper_float_cvts_pu(fp0
, fp0
);
7600 gen_store_fpr32(fp0
, fd
);
7601 tcg_temp_free_i32(fp0
);
7606 check_cp1_64bitmode(ctx
);
7608 TCGv_i64 fp0
= tcg_temp_new_i64();
7610 gen_load_fpr64(ctx
, fp0
, fs
);
7611 gen_helper_float_cvtpw_ps(fp0
, fp0
);
7612 gen_store_fpr64(ctx
, fp0
, fd
);
7613 tcg_temp_free_i64(fp0
);
7618 check_cp1_64bitmode(ctx
);
7620 TCGv_i32 fp0
= tcg_temp_new_i32();
7622 gen_load_fpr32(fp0
, fs
);
7623 gen_helper_float_cvts_pl(fp0
, fp0
);
7624 gen_store_fpr32(fp0
, fd
);
7625 tcg_temp_free_i32(fp0
);
7630 check_cp1_64bitmode(ctx
);
7632 TCGv_i32 fp0
= tcg_temp_new_i32();
7633 TCGv_i32 fp1
= tcg_temp_new_i32();
7635 gen_load_fpr32(fp0
, fs
);
7636 gen_load_fpr32(fp1
, ft
);
7637 gen_store_fpr32h(fp0
, fd
);
7638 gen_store_fpr32(fp1
, fd
);
7639 tcg_temp_free_i32(fp0
);
7640 tcg_temp_free_i32(fp1
);
7645 check_cp1_64bitmode(ctx
);
7647 TCGv_i32 fp0
= tcg_temp_new_i32();
7648 TCGv_i32 fp1
= tcg_temp_new_i32();
7650 gen_load_fpr32(fp0
, fs
);
7651 gen_load_fpr32h(fp1
, ft
);
7652 gen_store_fpr32(fp1
, fd
);
7653 gen_store_fpr32h(fp0
, fd
);
7654 tcg_temp_free_i32(fp0
);
7655 tcg_temp_free_i32(fp1
);
7660 check_cp1_64bitmode(ctx
);
7662 TCGv_i32 fp0
= tcg_temp_new_i32();
7663 TCGv_i32 fp1
= tcg_temp_new_i32();
7665 gen_load_fpr32h(fp0
, fs
);
7666 gen_load_fpr32(fp1
, ft
);
7667 gen_store_fpr32(fp1
, fd
);
7668 gen_store_fpr32h(fp0
, fd
);
7669 tcg_temp_free_i32(fp0
);
7670 tcg_temp_free_i32(fp1
);
7675 check_cp1_64bitmode(ctx
);
7677 TCGv_i32 fp0
= tcg_temp_new_i32();
7678 TCGv_i32 fp1
= tcg_temp_new_i32();
7680 gen_load_fpr32h(fp0
, fs
);
7681 gen_load_fpr32h(fp1
, ft
);
7682 gen_store_fpr32(fp1
, fd
);
7683 gen_store_fpr32h(fp0
, fd
);
7684 tcg_temp_free_i32(fp0
);
7685 tcg_temp_free_i32(fp1
);
7692 case OPC_CMP_UEQ_PS
:
7693 case OPC_CMP_OLT_PS
:
7694 case OPC_CMP_ULT_PS
:
7695 case OPC_CMP_OLE_PS
:
7696 case OPC_CMP_ULE_PS
:
7698 case OPC_CMP_NGLE_PS
:
7699 case OPC_CMP_SEQ_PS
:
7700 case OPC_CMP_NGL_PS
:
7702 case OPC_CMP_NGE_PS
:
7704 case OPC_CMP_NGT_PS
:
7705 if (ctx
->opcode
& (1 << 6)) {
7706 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
7707 opn
= condnames_abs
[func
-48];
7709 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
7710 opn
= condnames
[func
-48];
        generate_exception (ctx, EXCP_RI);
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, %s", opn, fregnames[fd], fregnames[fs], fregnames[ft]);
    MIPS_DEBUG("%s %s,%s", opn, fregnames[fs], fregnames[ft]);
    MIPS_DEBUG("%s %s,%s", opn, fregnames[fd], fregnames[fs]);
/* Coprocessor 3 (FPU) */
static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
                           int fd, int fs, int base, int index)
    const char *opn = "extended float load/store";
    TCGv t0 = tcg_temp_new();
        gen_load_gpr(t0, index);
    } else if (index == 0) {
        gen_load_gpr(t0, base);
        gen_load_gpr(t0, index);
        gen_op_addr_add(ctx, t0, cpu_gpr[base], t0);
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */
    save_cpu_state(ctx, 0);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        tcg_gen_qemu_ld32s(t0, t0, ctx->mem_idx);
        tcg_gen_trunc_tl_i32(fp0, t0);
        gen_store_fpr32(fp0, fd);
        tcg_temp_free_i32(fp0);
        check_cp1_registers(ctx, fd);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);
        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        tcg_gen_qemu_ld64(fp0, t0, ctx->mem_idx);
        gen_store_fpr64(ctx, fp0, fd);
        tcg_temp_free_i64(fp0);
        TCGv_i32 fp0 = tcg_temp_new_i32();
        TCGv t1 = tcg_temp_new();
        gen_load_fpr32(fp0, fs);
        tcg_gen_extu_i32_tl(t1, fp0);
        tcg_gen_qemu_st32(t1, t0, ctx->mem_idx);
        tcg_temp_free_i32(fp0);
        check_cp1_registers(ctx, fs);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, fs);
        tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
        tcg_temp_free_i64(fp0);
        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);
        TCGv_i64 fp0 = tcg_temp_new_i64();
        gen_load_fpr64(ctx, fp0, fs);
        tcg_gen_qemu_st64(fp0, t0, ctx->mem_idx);
        tcg_temp_free_i64(fp0);
    (void)opn; (void)store; /* avoid compiler warnings */
    MIPS_DEBUG("%s %s, %s(%s)", opn, fregnames[store ? fs : fd],
               regnames[index], regnames[base]);
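/* Note the tcg_gen_andi_tl(t0, t0, ~0x7) before the 64-bit accesses in
   two of the cases above: those are presumably the LUXC1/SUXC1 style
   "indexed unaligned" forms, which force the effective address down to
   an 8-byte boundary instead of taking an address error. */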
static void gen_flt3_arith (DisasContext *ctx, uint32_t opc,
                            int fd, int fr, int fs, int ft)
    const char *opn = "flt3_arith";
        check_cp1_64bitmode(ctx);
        TCGv t0 = tcg_temp_local_new();
        TCGv_i32 fp = tcg_temp_new_i32();
        TCGv_i32 fph = tcg_temp_new_i32();
        int l1 = gen_new_label();
        int l2 = gen_new_label();
        gen_load_gpr(t0, fr);
        tcg_gen_andi_tl(t0, t0, 0x7);
        tcg_gen_brcondi_tl(TCG_COND_NE, t0, 0, l1);
        gen_load_fpr32(fp, fs);
        gen_load_fpr32h(fph, fs);
        gen_store_fpr32(fp, fd);
        gen_store_fpr32h(fph, fd);
        tcg_gen_brcondi_tl(TCG_COND_NE, t0, 4, l2);
#ifdef TARGET_WORDS_BIGENDIAN
        gen_load_fpr32(fp, fs);
        gen_load_fpr32h(fph, ft);
        gen_store_fpr32h(fp, fd);
        gen_store_fpr32(fph, fd);
        gen_load_fpr32h(fph, fs);
        gen_load_fpr32(fp, ft);
        gen_store_fpr32(fph, fd);
        gen_store_fpr32h(fp, fd);
        tcg_temp_free_i32(fp);
        tcg_temp_free_i32(fph);
7883 TCGv_i32 fp0
= tcg_temp_new_i32();
7884 TCGv_i32 fp1
= tcg_temp_new_i32();
7885 TCGv_i32 fp2
= tcg_temp_new_i32();
7887 gen_load_fpr32(fp0
, fs
);
7888 gen_load_fpr32(fp1
, ft
);
7889 gen_load_fpr32(fp2
, fr
);
7890 gen_helper_float_muladd_s(fp2
, fp0
, fp1
, fp2
);
7891 tcg_temp_free_i32(fp0
);
7892 tcg_temp_free_i32(fp1
);
7893 gen_store_fpr32(fp2
, fd
);
7894 tcg_temp_free_i32(fp2
);
7900 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
7902 TCGv_i64 fp0
= tcg_temp_new_i64();
7903 TCGv_i64 fp1
= tcg_temp_new_i64();
7904 TCGv_i64 fp2
= tcg_temp_new_i64();
7906 gen_load_fpr64(ctx
, fp0
, fs
);
7907 gen_load_fpr64(ctx
, fp1
, ft
);
7908 gen_load_fpr64(ctx
, fp2
, fr
);
7909 gen_helper_float_muladd_d(fp2
, fp0
, fp1
, fp2
);
7910 tcg_temp_free_i64(fp0
);
7911 tcg_temp_free_i64(fp1
);
7912 gen_store_fpr64(ctx
, fp2
, fd
);
7913 tcg_temp_free_i64(fp2
);
7918 check_cp1_64bitmode(ctx
);
7920 TCGv_i64 fp0
= tcg_temp_new_i64();
7921 TCGv_i64 fp1
= tcg_temp_new_i64();
7922 TCGv_i64 fp2
= tcg_temp_new_i64();
7924 gen_load_fpr64(ctx
, fp0
, fs
);
7925 gen_load_fpr64(ctx
, fp1
, ft
);
7926 gen_load_fpr64(ctx
, fp2
, fr
);
7927 gen_helper_float_muladd_ps(fp2
, fp0
, fp1
, fp2
);
7928 tcg_temp_free_i64(fp0
);
7929 tcg_temp_free_i64(fp1
);
7930 gen_store_fpr64(ctx
, fp2
, fd
);
7931 tcg_temp_free_i64(fp2
);
7938 TCGv_i32 fp0
= tcg_temp_new_i32();
7939 TCGv_i32 fp1
= tcg_temp_new_i32();
7940 TCGv_i32 fp2
= tcg_temp_new_i32();
7942 gen_load_fpr32(fp0
, fs
);
7943 gen_load_fpr32(fp1
, ft
);
7944 gen_load_fpr32(fp2
, fr
);
7945 gen_helper_float_mulsub_s(fp2
, fp0
, fp1
, fp2
);
7946 tcg_temp_free_i32(fp0
);
7947 tcg_temp_free_i32(fp1
);
7948 gen_store_fpr32(fp2
, fd
);
7949 tcg_temp_free_i32(fp2
);
7955 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
7957 TCGv_i64 fp0
= tcg_temp_new_i64();
7958 TCGv_i64 fp1
= tcg_temp_new_i64();
7959 TCGv_i64 fp2
= tcg_temp_new_i64();
7961 gen_load_fpr64(ctx
, fp0
, fs
);
7962 gen_load_fpr64(ctx
, fp1
, ft
);
7963 gen_load_fpr64(ctx
, fp2
, fr
);
7964 gen_helper_float_mulsub_d(fp2
, fp0
, fp1
, fp2
);
7965 tcg_temp_free_i64(fp0
);
7966 tcg_temp_free_i64(fp1
);
7967 gen_store_fpr64(ctx
, fp2
, fd
);
7968 tcg_temp_free_i64(fp2
);
7973 check_cp1_64bitmode(ctx
);
7975 TCGv_i64 fp0
= tcg_temp_new_i64();
7976 TCGv_i64 fp1
= tcg_temp_new_i64();
7977 TCGv_i64 fp2
= tcg_temp_new_i64();
7979 gen_load_fpr64(ctx
, fp0
, fs
);
7980 gen_load_fpr64(ctx
, fp1
, ft
);
7981 gen_load_fpr64(ctx
, fp2
, fr
);
7982 gen_helper_float_mulsub_ps(fp2
, fp0
, fp1
, fp2
);
7983 tcg_temp_free_i64(fp0
);
7984 tcg_temp_free_i64(fp1
);
7985 gen_store_fpr64(ctx
, fp2
, fd
);
7986 tcg_temp_free_i64(fp2
);
7993 TCGv_i32 fp0
= tcg_temp_new_i32();
7994 TCGv_i32 fp1
= tcg_temp_new_i32();
7995 TCGv_i32 fp2
= tcg_temp_new_i32();
7997 gen_load_fpr32(fp0
, fs
);
7998 gen_load_fpr32(fp1
, ft
);
7999 gen_load_fpr32(fp2
, fr
);
8000 gen_helper_float_nmuladd_s(fp2
, fp0
, fp1
, fp2
);
8001 tcg_temp_free_i32(fp0
);
8002 tcg_temp_free_i32(fp1
);
8003 gen_store_fpr32(fp2
, fd
);
8004 tcg_temp_free_i32(fp2
);
8010 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
8012 TCGv_i64 fp0
= tcg_temp_new_i64();
8013 TCGv_i64 fp1
= tcg_temp_new_i64();
8014 TCGv_i64 fp2
= tcg_temp_new_i64();
8016 gen_load_fpr64(ctx
, fp0
, fs
);
8017 gen_load_fpr64(ctx
, fp1
, ft
);
8018 gen_load_fpr64(ctx
, fp2
, fr
);
8019 gen_helper_float_nmuladd_d(fp2
, fp0
, fp1
, fp2
);
8020 tcg_temp_free_i64(fp0
);
8021 tcg_temp_free_i64(fp1
);
8022 gen_store_fpr64(ctx
, fp2
, fd
);
8023 tcg_temp_free_i64(fp2
);
8028 check_cp1_64bitmode(ctx
);
8030 TCGv_i64 fp0
= tcg_temp_new_i64();
8031 TCGv_i64 fp1
= tcg_temp_new_i64();
8032 TCGv_i64 fp2
= tcg_temp_new_i64();
8034 gen_load_fpr64(ctx
, fp0
, fs
);
8035 gen_load_fpr64(ctx
, fp1
, ft
);
8036 gen_load_fpr64(ctx
, fp2
, fr
);
8037 gen_helper_float_nmuladd_ps(fp2
, fp0
, fp1
, fp2
);
8038 tcg_temp_free_i64(fp0
);
8039 tcg_temp_free_i64(fp1
);
8040 gen_store_fpr64(ctx
, fp2
, fd
);
8041 tcg_temp_free_i64(fp2
);
8048 TCGv_i32 fp0
= tcg_temp_new_i32();
8049 TCGv_i32 fp1
= tcg_temp_new_i32();
8050 TCGv_i32 fp2
= tcg_temp_new_i32();
8052 gen_load_fpr32(fp0
, fs
);
8053 gen_load_fpr32(fp1
, ft
);
8054 gen_load_fpr32(fp2
, fr
);
8055 gen_helper_float_nmulsub_s(fp2
, fp0
, fp1
, fp2
);
8056 tcg_temp_free_i32(fp0
);
8057 tcg_temp_free_i32(fp1
);
8058 gen_store_fpr32(fp2
, fd
);
8059 tcg_temp_free_i32(fp2
);
8065 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
8067 TCGv_i64 fp0
= tcg_temp_new_i64();
8068 TCGv_i64 fp1
= tcg_temp_new_i64();
8069 TCGv_i64 fp2
= tcg_temp_new_i64();
8071 gen_load_fpr64(ctx
, fp0
, fs
);
8072 gen_load_fpr64(ctx
, fp1
, ft
);
8073 gen_load_fpr64(ctx
, fp2
, fr
);
8074 gen_helper_float_nmulsub_d(fp2
, fp0
, fp1
, fp2
);
8075 tcg_temp_free_i64(fp0
);
8076 tcg_temp_free_i64(fp1
);
8077 gen_store_fpr64(ctx
, fp2
, fd
);
8078 tcg_temp_free_i64(fp2
);
8083 check_cp1_64bitmode(ctx
);
8085 TCGv_i64 fp0
= tcg_temp_new_i64();
8086 TCGv_i64 fp1
= tcg_temp_new_i64();
8087 TCGv_i64 fp2
= tcg_temp_new_i64();
8089 gen_load_fpr64(ctx
, fp0
, fs
);
8090 gen_load_fpr64(ctx
, fp1
, ft
);
8091 gen_load_fpr64(ctx
, fp2
, fr
);
8092 gen_helper_float_nmulsub_ps(fp2
, fp0
, fp1
, fp2
);
8093 tcg_temp_free_i64(fp0
);
8094 tcg_temp_free_i64(fp1
);
8095 gen_store_fpr64(ctx
, fp2
, fd
);
8096 tcg_temp_free_i64(fp2
);
        generate_exception (ctx, EXCP_RI);
    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s %s, %s, %s, %s", opn, fregnames[fd], fregnames[fr],
               fregnames[fs], fregnames[ft]);
gen_rdhwr (CPUMIPSState *env, DisasContext *ctx, int rt, int rd)
    check_insn(env, ctx, ISA_MIPS32R2);
    t0 = tcg_temp_new();
        save_cpu_state(ctx, 1);
        gen_helper_rdhwr_cpunum(t0);
        gen_store_gpr(t0, rt);
        save_cpu_state(ctx, 1);
        gen_helper_rdhwr_synci_step(t0);
        gen_store_gpr(t0, rt);
        save_cpu_state(ctx, 1);
        gen_helper_rdhwr_cc(t0);
        gen_store_gpr(t0, rt);
        save_cpu_state(ctx, 1);
        gen_helper_rdhwr_ccres(t0);
        gen_store_gpr(t0, rt);
#if defined(CONFIG_USER_ONLY)
        tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, tls_value));
        gen_store_gpr(t0, rt);
        /* XXX: Some CPUs implement this in hardware.
           Not supported yet. */
    default:            /* Invalid */
        MIPS_INVAL("rdhwr");
        generate_exception(ctx, EXCP_RI);
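/* The RDHWR registers handled above follow the MIPS32R2 numbering:
   0 = CPUNum, 1 = SYNCI_Step, 2 = CC (cycle counter), 3 = CCRes, and
   29 = UserLocal/TLS pointer, which user-mode emulation reads directly
   from tls_value instead of calling a helper. */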
static void handle_delay_slot (CPUMIPSState *env, DisasContext *ctx,
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branches completion */
        ctx->hflags &= ~MIPS_HFLAG_BMASK;
        ctx->bstate = BS_BRANCH;
        save_cpu_state(ctx, 0);
        /* FIXME: Need to clear can_do_io. */
        switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
            /* unconditional branch */
            MIPS_DEBUG("unconditional branch");
            if (proc_hflags & MIPS_HFLAG_BX) {
                tcg_gen_xori_i32(hflags, hflags, MIPS_HFLAG_M16);
            gen_goto_tb(ctx, 0, ctx->btarget);
            /* blikely taken case */
            MIPS_DEBUG("blikely branch taken");
            gen_goto_tb(ctx, 0, ctx->btarget);
            /* Conditional branch */
            MIPS_DEBUG("conditional branch");
                int l1 = gen_new_label();
                tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
                gen_goto_tb(ctx, 1, ctx->pc + insn_bytes);
                gen_goto_tb(ctx, 0, ctx->btarget);
            /* unconditional branch to register */
            MIPS_DEBUG("branch to register");
            if (env->insn_flags & (ASE_MIPS16 | ASE_MICROMIPS)) {
                TCGv t0 = tcg_temp_new();
                TCGv_i32 t1 = tcg_temp_new_i32();
                tcg_gen_andi_tl(t0, btarget, 0x1);
                tcg_gen_trunc_tl_i32(t1, t0);
                tcg_gen_andi_i32(hflags, hflags, ~(uint32_t)MIPS_HFLAG_M16);
                tcg_gen_shli_i32(t1, t1, MIPS_HFLAG_M16_SHIFT);
                tcg_gen_or_i32(hflags, hflags, t1);
                tcg_temp_free_i32(t1);
                tcg_gen_andi_tl(cpu_PC, btarget, ~(target_ulong)0x1);
                tcg_gen_mov_tl(cpu_PC, btarget);
            if (ctx->singlestep_enabled) {
                save_cpu_state(ctx, 0);
                gen_helper_0i(raise_exception, EXCP_DEBUG);
            MIPS_DEBUG("unknown branch");
/* ISA extensions (ASEs) */

/* MIPS16 extension to MIPS32 */

/* MIPS16 major opcodes */
    M16_OPC_ADDIUSP = 0x00,
    M16_OPC_ADDIUPC = 0x01,
    M16_OPC_BEQZ = 0x04,
    M16_OPC_BNEQZ = 0x05,
    M16_OPC_SHIFT = 0x06,
    M16_OPC_RRIA = 0x08,
    M16_OPC_ADDIU8 = 0x09,
    M16_OPC_SLTI = 0x0a,
    M16_OPC_SLTIU = 0x0b,
    M16_OPC_CMPI = 0x0e,
    M16_OPC_LWSP = 0x12,
    M16_OPC_LWPC = 0x16,
    M16_OPC_SWSP = 0x1a,
    M16_OPC_EXTEND = 0x1e,

/* I8 funct field */
/* RR funct field */
/* I64 funct field */
/* RR ry field for CNVT */
    RR_RY_CNVT_ZEB = 0x0,
    RR_RY_CNVT_ZEH = 0x1,
    RR_RY_CNVT_ZEW = 0x2,
    RR_RY_CNVT_SEB = 0x4,
    RR_RY_CNVT_SEH = 0x5,
    RR_RY_CNVT_SEW = 0x6,
static int xlat (int r)
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };
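/* The 3-bit MIPS16 register fields do not map linearly onto the 32
   GPRs: encodings 0..7 select $16, $17, $2..$7 (s0, s1, v0, v1,
   a0..a3), so for example xlat(2) == 2 but xlat(0) == 16. */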
static void gen_mips16_save (DisasContext *ctx,
                             int xsregs, int aregs,
                             int do_ra, int do_s0, int do_s1,
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
        generate_exception(ctx, EXCP_RI);
        gen_base_offset_addr(ctx, t0, 29, 12);
        gen_load_gpr(t1, 7);
        op_st_sw(t1, t0, ctx);
        gen_base_offset_addr(ctx, t0, 29, 8);
        gen_load_gpr(t1, 6);
        op_st_sw(t1, t0, ctx);
        gen_base_offset_addr(ctx, t0, 29, 4);
        gen_load_gpr(t1, 5);
        op_st_sw(t1, t0, ctx);
        gen_base_offset_addr(ctx, t0, 29, 0);
        gen_load_gpr(t1, 4);
        op_st_sw(t1, t0, ctx);
    gen_load_gpr(t0, 29);

#define DECR_AND_STORE(reg) do {        \
        tcg_gen_subi_tl(t0, t0, 4);     \
        gen_load_gpr(t1, reg);          \
        op_st_sw(t1, t0, ctx);          \
    } while (0)

        generate_exception(ctx, EXCP_RI);

#undef DECR_AND_STORE

    tcg_gen_subi_tl(cpu_gpr[29], cpu_gpr[29], framesize);
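/* The MIPS16 SAVE instruction first spills any requested argument
   registers ($4..$7) to the caller's frame at fixed offsets 0..12 above
   the incoming $29, then walks downward with DECR_AND_STORE pushing
   ra/s0/s1 and the extended static registers, and finally drops $29 by
   the decoded frame size; gen_mips16_restore() below mirrors this. */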
static void gen_mips16_restore (DisasContext *ctx,
                                int xsregs, int aregs,
                                int do_ra, int do_s0, int do_s1,
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    tcg_gen_addi_tl(t0, cpu_gpr[29], framesize);

#define DECR_AND_LOAD(reg) do {         \
        tcg_gen_subi_tl(t0, t0, 4);     \
        op_ld_lw(t1, t0, ctx);          \
        gen_store_gpr(t1, reg);         \
    } while (0)

        generate_exception(ctx, EXCP_RI);

#undef DECR_AND_LOAD

    tcg_gen_addi_tl(cpu_gpr[29], cpu_gpr[29], framesize);
static void gen_addiupc (DisasContext *ctx, int rx, int imm,
                         int is_64_bit, int extended)
    if (extended && (ctx->hflags & MIPS_HFLAG_BMASK)) {
        generate_exception(ctx, EXCP_RI);
    t0 = tcg_temp_new();
    tcg_gen_movi_tl(t0, pc_relative_pc(ctx));
    tcg_gen_addi_tl(cpu_gpr[rx], t0, imm);
        tcg_gen_ext32s_tl(cpu_gpr[rx], cpu_gpr[rx]);
8621 #if defined(TARGET_MIPS64)
8622 static void decode_i64_mips16 (CPUMIPSState
*env
, DisasContext
*ctx
,
8623 int ry
, int funct
, int16_t offset
,
8629 offset
= extended
? offset
: offset
<< 3;
8630 gen_ld(env
, ctx
, OPC_LD
, ry
, 29, offset
);
8634 offset
= extended
? offset
: offset
<< 3;
8635 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
8639 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
8640 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
8644 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
8645 gen_arith_imm(env
, ctx
, OPC_DADDIU
, 29, 29, offset
);
8648 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8649 generate_exception(ctx
, EXCP_RI
);
8651 offset
= extended
? offset
: offset
<< 3;
8652 gen_ld(env
, ctx
, OPC_LDPC
, ry
, 0, offset
);
8657 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
8658 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, ry
, offset
);
8662 offset
= extended
? offset
: offset
<< 2;
8663 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
8667 offset
= extended
? offset
: offset
<< 2;
8668 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, 29, offset
);
8674 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
,
8677 int extend
= lduw_code(ctx
->pc
+ 2);
8678 int op
, rx
, ry
, funct
, sa
;
8679 int16_t imm
, offset
;
8681 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
8682 op
= (ctx
->opcode
>> 11) & 0x1f;
8683 sa
= (ctx
->opcode
>> 22) & 0x1f;
8684 funct
= (ctx
->opcode
>> 8) & 0x7;
8685 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
8686 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
8687 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
8688 | ((ctx
->opcode
>> 21) & 0x3f) << 5
8689 | (ctx
->opcode
& 0x1f));
8691 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
8694 case M16_OPC_ADDIUSP
:
8695 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, 29, imm
);
8697 case M16_OPC_ADDIUPC
:
8698 gen_addiupc(ctx
, rx
, imm
, 0, 1);
8701 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1);
8702 /* No delay slot, so just process as a normal instruction */
8705 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1);
8706 /* No delay slot, so just process as a normal instruction */
8709 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1);
8710 /* No delay slot, so just process as a normal instruction */
8713 switch (ctx
->opcode
& 0x3) {
8715 gen_shift_imm(env
, ctx
, OPC_SLL
, rx
, ry
, sa
);
8718 #if defined(TARGET_MIPS64)
8720 gen_shift_imm(env
, ctx
, OPC_DSLL
, rx
, ry
, sa
);
8722 generate_exception(ctx
, EXCP_RI
);
8726 gen_shift_imm(env
, ctx
, OPC_SRL
, rx
, ry
, sa
);
8729 gen_shift_imm(env
, ctx
, OPC_SRA
, rx
, ry
, sa
);
8733 #if defined(TARGET_MIPS64)
8736 gen_ld(env
, ctx
, OPC_LD
, ry
, rx
, offset
);
8740 imm
= ctx
->opcode
& 0xf;
8741 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
8742 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
8743 imm
= (int16_t) (imm
<< 1) >> 1;
8744 if ((ctx
->opcode
>> 4) & 0x1) {
8745 #if defined(TARGET_MIPS64)
8747 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, rx
, imm
);
8749 generate_exception(ctx
, EXCP_RI
);
8752 gen_arith_imm(env
, ctx
, OPC_ADDIU
, ry
, rx
, imm
);
8755 case M16_OPC_ADDIU8
:
8756 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, rx
, imm
);
8759 gen_slt_imm(env
, OPC_SLTI
, 24, rx
, imm
);
8762 gen_slt_imm(env
, OPC_SLTIU
, 24, rx
, imm
);
8767 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1);
8770 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1);
8773 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
8776 gen_arith_imm(env
, ctx
, OPC_ADDIU
, 29, 29, imm
);
8780 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
8781 int aregs
= (ctx
->opcode
>> 16) & 0xf;
8782 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
8783 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
8784 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
8785 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
8786 | (ctx
->opcode
& 0xf)) << 3;
8788 if (ctx
->opcode
& (1 << 7)) {
8789 gen_mips16_save(ctx
, xsregs
, aregs
,
8790 do_ra
, do_s0
, do_s1
,
8793 gen_mips16_restore(ctx
, xsregs
, aregs
,
8794 do_ra
, do_s0
, do_s1
,
8800 generate_exception(ctx
, EXCP_RI
);
8805 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
8808 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
8810 #if defined(TARGET_MIPS64)
8812 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
8816 gen_ld(env
, ctx
, OPC_LB
, ry
, rx
, offset
);
8819 gen_ld(env
, ctx
, OPC_LH
, ry
, rx
, offset
);
8822 gen_ld(env
, ctx
, OPC_LW
, rx
, 29, offset
);
8825 gen_ld(env
, ctx
, OPC_LW
, ry
, rx
, offset
);
8828 gen_ld(env
, ctx
, OPC_LBU
, ry
, rx
, offset
);
8831 gen_ld(env
, ctx
, OPC_LHU
, ry
, rx
, offset
);
8834 gen_ld(env
, ctx
, OPC_LWPC
, rx
, 0, offset
);
8836 #if defined(TARGET_MIPS64)
8838 gen_ld(env
, ctx
, OPC_LWU
, ry
, rx
, offset
);
8842 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
8845 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
8848 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
8851 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
8853 #if defined(TARGET_MIPS64)
8855 decode_i64_mips16(env
, ctx
, ry
, funct
, offset
, 1);
8859 generate_exception(ctx
, EXCP_RI
);
8866 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
,
8871 int op
, cnvt_op
, op1
, offset
;
8875 op
= (ctx
->opcode
>> 11) & 0x1f;
8876 sa
= (ctx
->opcode
>> 2) & 0x7;
8877 sa
= sa
== 0 ? 8 : sa
;
8878 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
8879 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
8880 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
8881 op1
= offset
= ctx
->opcode
& 0x1f;
8886 case M16_OPC_ADDIUSP
:
8888 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
8890 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, 29, imm
);
8893 case M16_OPC_ADDIUPC
:
8894 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
8897 offset
= (ctx
->opcode
& 0x7ff) << 1;
8898 offset
= (int16_t)(offset
<< 4) >> 4;
8899 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
);
8900 /* No delay slot, so just process as a normal instruction */
8903 offset
= lduw_code(ctx
->pc
+ 2);
8904 offset
= (((ctx
->opcode
& 0x1f) << 21)
8905 | ((ctx
->opcode
>> 5) & 0x1f) << 16
8907 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALXS
: OPC_JALS
;
8908 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
);
8913 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0, ((int8_t)ctx
->opcode
) << 1);
8914 /* No delay slot, so just process as a normal instruction */
8917 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0, ((int8_t)ctx
->opcode
) << 1);
8918 /* No delay slot, so just process as a normal instruction */
8921 switch (ctx
->opcode
& 0x3) {
8923 gen_shift_imm(env
, ctx
, OPC_SLL
, rx
, ry
, sa
);
8926 #if defined(TARGET_MIPS64)
8928 gen_shift_imm(env
, ctx
, OPC_DSLL
, rx
, ry
, sa
);
8930 generate_exception(ctx
, EXCP_RI
);
8934 gen_shift_imm(env
, ctx
, OPC_SRL
, rx
, ry
, sa
);
8937 gen_shift_imm(env
, ctx
, OPC_SRA
, rx
, ry
, sa
);
8941 #if defined(TARGET_MIPS64)
8944 gen_ld(env
, ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
8949 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
8951 if ((ctx
->opcode
>> 4) & 1) {
8952 #if defined(TARGET_MIPS64)
8954 gen_arith_imm(env
, ctx
, OPC_DADDIU
, ry
, rx
, imm
);
8956 generate_exception(ctx
, EXCP_RI
);
8959 gen_arith_imm(env
, ctx
, OPC_ADDIU
, ry
, rx
, imm
);
8963 case M16_OPC_ADDIU8
:
8965 int16_t imm
= (int8_t) ctx
->opcode
;
8967 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, rx
, imm
);
8972 int16_t imm
= (uint8_t) ctx
->opcode
;
8974 gen_slt_imm(env
, OPC_SLTI
, 24, rx
, imm
);
8979 int16_t imm
= (uint8_t) ctx
->opcode
;
8981 gen_slt_imm(env
, OPC_SLTIU
, 24, rx
, imm
);
8988 funct
= (ctx
->opcode
>> 8) & 0x7;
8991 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
8992 ((int8_t)ctx
->opcode
) << 1);
8995 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
8996 ((int8_t)ctx
->opcode
) << 1);
8999 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
9002 gen_arith_imm(env
, ctx
, OPC_ADDIU
, 29, 29,
9003 ((int8_t)ctx
->opcode
) << 3);
9007 int do_ra
= ctx
->opcode
& (1 << 6);
9008 int do_s0
= ctx
->opcode
& (1 << 5);
9009 int do_s1
= ctx
->opcode
& (1 << 4);
9010 int framesize
= ctx
->opcode
& 0xf;
9012 if (framesize
== 0) {
9015 framesize
= framesize
<< 3;
9018 if (ctx
->opcode
& (1 << 7)) {
9019 gen_mips16_save(ctx
, 0, 0,
9020 do_ra
, do_s0
, do_s1
, framesize
);
9022 gen_mips16_restore(ctx
, 0, 0,
9023 do_ra
, do_s0
, do_s1
, framesize
);
9029 int rz
= xlat(ctx
->opcode
& 0x7);
9031 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
9032 ((ctx
->opcode
>> 5) & 0x7);
9033 gen_arith(env
, ctx
, OPC_ADDU
, reg32
, rz
, 0);
9037 reg32
= ctx
->opcode
& 0x1f;
9038 gen_arith(env
, ctx
, OPC_ADDU
, ry
, reg32
, 0);
9041 generate_exception(ctx
, EXCP_RI
);
9048 int16_t imm
= (uint8_t) ctx
->opcode
;
9050 gen_arith_imm(env
, ctx
, OPC_ADDIU
, rx
, 0, imm
);
9055 int16_t imm
= (uint8_t) ctx
->opcode
;
9057 gen_logic_imm(env
, OPC_XORI
, 24, rx
, imm
);
9060 #if defined(TARGET_MIPS64)
9063 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
9067 gen_ld(env
, ctx
, OPC_LB
, ry
, rx
, offset
);
9070 gen_ld(env
, ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
9073 gen_ld(env
, ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
9076 gen_ld(env
, ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
9079 gen_ld(env
, ctx
, OPC_LBU
, ry
, rx
, offset
);
9082 gen_ld(env
, ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
9085 gen_ld(env
, ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
9087 #if defined (TARGET_MIPS64)
9090 gen_ld(env
, ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
9094 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
9097 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
9100 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
9103 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
9107 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
9110 switch (ctx
->opcode
& 0x3) {
9112 mips32_op
= OPC_ADDU
;
9115 mips32_op
= OPC_SUBU
;
9117 #if defined(TARGET_MIPS64)
9119 mips32_op
= OPC_DADDU
;
9123 mips32_op
= OPC_DSUBU
;
9128 generate_exception(ctx
, EXCP_RI
);
9132 gen_arith(env
, ctx
, mips32_op
, rz
, rx
, ry
);
9141 int nd
= (ctx
->opcode
>> 7) & 0x1;
9142 int link
= (ctx
->opcode
>> 6) & 0x1;
9143 int ra
= (ctx
->opcode
>> 5) & 0x1;
9146 op
= nd
? OPC_JALRC
: OPC_JALRS
;
9151 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0);
9158 /* XXX: not clear which exception should be raised
9159 * when in debug mode...
9161 check_insn(env
, ctx
, ISA_MIPS32
);
9162 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
9163 generate_exception(ctx
, EXCP_DBp
);
9165 generate_exception(ctx
, EXCP_DBp
);
9169 gen_slt(env
, OPC_SLT
, 24, rx
, ry
);
9172 gen_slt(env
, OPC_SLTU
, 24, rx
, ry
);
9175 generate_exception(ctx
, EXCP_BREAK
);
9178 gen_shift(env
, ctx
, OPC_SLLV
, ry
, rx
, ry
);
9181 gen_shift(env
, ctx
, OPC_SRLV
, ry
, rx
, ry
);
9184 gen_shift(env
, ctx
, OPC_SRAV
, ry
, rx
, ry
);
9186 #if defined (TARGET_MIPS64)
9189 gen_shift_imm(env
, ctx
, OPC_DSRL
, ry
, ry
, sa
);
9193 gen_logic(env
, OPC_XOR
, 24, rx
, ry
);
9196 gen_arith(env
, ctx
, OPC_SUBU
, rx
, 0, ry
);
9199 gen_logic(env
, OPC_AND
, rx
, rx
, ry
);
9202 gen_logic(env
, OPC_OR
, rx
, rx
, ry
);
9205 gen_logic(env
, OPC_XOR
, rx
, rx
, ry
);
9208 gen_logic(env
, OPC_NOR
, rx
, ry
, 0);
9211 gen_HILO(ctx
, OPC_MFHI
, rx
);
9215 case RR_RY_CNVT_ZEB
:
9216 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9218 case RR_RY_CNVT_ZEH
:
9219 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9221 case RR_RY_CNVT_SEB
:
9222 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9224 case RR_RY_CNVT_SEH
:
9225 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9227 #if defined (TARGET_MIPS64)
9228 case RR_RY_CNVT_ZEW
:
9230 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9232 case RR_RY_CNVT_SEW
:
9234 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
9238 generate_exception(ctx
, EXCP_RI
);
9243 gen_HILO(ctx
, OPC_MFLO
, rx
);
9245 #if defined (TARGET_MIPS64)
9248 gen_shift_imm(env
, ctx
, OPC_DSRA
, ry
, ry
, sa
);
9252 gen_shift(env
, ctx
, OPC_DSLLV
, ry
, rx
, ry
);
9256 gen_shift(env
, ctx
, OPC_DSRLV
, ry
, rx
, ry
);
9260 gen_shift(env
, ctx
, OPC_DSRAV
, ry
, rx
, ry
);
9264 gen_muldiv(ctx
, OPC_MULT
, rx
, ry
);
9267 gen_muldiv(ctx
, OPC_MULTU
, rx
, ry
);
9270 gen_muldiv(ctx
, OPC_DIV
, rx
, ry
);
9273 gen_muldiv(ctx
, OPC_DIVU
, rx
, ry
);
9275 #if defined (TARGET_MIPS64)
9278 gen_muldiv(ctx
, OPC_DMULT
, rx
, ry
);
9282 gen_muldiv(ctx
, OPC_DMULTU
, rx
, ry
);
9286 gen_muldiv(ctx
, OPC_DDIV
, rx
, ry
);
9290 gen_muldiv(ctx
, OPC_DDIVU
, rx
, ry
);
9294 generate_exception(ctx
, EXCP_RI
);
9298 case M16_OPC_EXTEND
:
9299 decode_extended_mips16_opc(env
, ctx
, is_branch
);
9302 #if defined(TARGET_MIPS64)
9304 funct
= (ctx
->opcode
>> 8) & 0x7;
9305 decode_i64_mips16(env
, ctx
, ry
, funct
, offset
, 0);
9309 generate_exception(ctx
, EXCP_RI
);
9316 /* microMIPS extension to MIPS32 */
9318 /* microMIPS32 major opcodes */
9357 /* 0x20 is reserved */
9367 /* 0x28 and 0x29 are reserved */
9377 /* 0x30 and 0x31 are reserved */
9387 /* 0x38 and 0x39 are reserved */
9398 /* POOL32A encoding of minor opcode field */
9401 /* These opcodes are distinguished only by bits 9..6; those bits are
9402 * what are recorded below. */
9428 /* The following can be distinguished by their lower 6 bits. */
9434 /* POOL32AXF encoding of minor opcode field extension */
9448 /* bits 13..12 for 0x01 */
9454 /* bits 13..12 for 0x2a */
9460 /* bits 13..12 for 0x32 */
9464 /* bits 15..12 for 0x2c */
9480 /* bits 15..12 for 0x34 */
9488 /* bits 15..12 for 0x3c */
9490 JR
= 0x0, /* alias */
9495 /* bits 15..12 for 0x05 */
9499 /* bits 15..12 for 0x0d */
9509 /* bits 15..12 for 0x15 */
9515 /* bits 15..12 for 0x1d */
9519 /* bits 15..12 for 0x2d */
9524 /* bits 15..12 for 0x35 */
9531 /* POOL32B encoding of minor opcode field (bits 15..12) */
9547 /* POOL32C encoding of minor opcode field (bits 15..12) */
9555 /* 0xa is reserved */
9562 /* 0x6 is reserved */
9568 /* POOL32F encoding of minor opcode field (bits 5..0) */
9571 /* These are the bit 7..6 values */
9582 /* These are the bit 8..6 values */
9626 CABS_COND_FMT
= 0x1c, /* MIPS3D */
9630 /* POOL32Fxf encoding of minor opcode extension field */
9668 /* POOL32I encoding of minor opcode field (bits 25..21) */
9693 /* These overlap and are distinguished by bit16 of the instruction */
9702 /* POOL16A encoding of minor opcode field */
9709 /* POOL16B encoding of minor opcode field */
9716 /* POOL16C encoding of minor opcode field */
9736 /* POOL16D encoding of minor opcode field */
9743 /* POOL16E encoding of minor opcode field */
static int mmreg (int r)
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

/* Used for 16-bit store instructions.  */
static int mmreg2 (int r)
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };
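/* mmreg() uses the same 0..7 -> $16, $17, $2..$7 mapping as the MIPS16
   xlat() above; mmreg2() differs only in mapping encoding 0 to $0,
   which matches the source-register encoding of the 16-bit microMIPS
   store instructions. */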
9765 #define uMIPS_RD(op) ((op >> 7) & 0x7)
9766 #define uMIPS_RS(op) ((op >> 4) & 0x7)
9767 #define uMIPS_RS2(op) uMIPS_RS(op)
9768 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
9769 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
9770 #define uMIPS_RS5(op) (op & 0x1f)
/* Signed immediate */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32-width)))                  \
               << (32-width))                                           \
     >> (32-width))
/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
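/* Worked example with a hypothetical opcode word, purely to illustrate
   the field macros: for op = 0x0000ACE0, ZIMM(op, 5, 8) extracts bits
   12..5 = 0x67, and SIMM(op, 5, 8) yields the same value because the
   field's top bit is clear; a field of 0xE7 in the same position would
   instead come back from SIMM() sign-extended to -25. */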
static void gen_addiur1sp (CPUMIPSState *env, DisasContext *ctx)
{
    int rd = mmreg(uMIPS_RD(ctx->opcode));

    gen_arith_imm(env, ctx, OPC_ADDIU, rd, 29, ((ctx->opcode >> 1) & 0x3f) << 2);
}
static void gen_addiur2 (CPUMIPSState *env, DisasContext *ctx)
{
    static const int decoded_imm[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));

    gen_arith_imm(env, ctx, OPC_ADDIU, rd, rs, decoded_imm[ZIMM(ctx->opcode, 1, 3)]);
}
static void gen_addiusp (CPUMIPSState *env, DisasContext *ctx)
{
    int encoded = ZIMM(ctx->opcode, 1, 9);
    int decoded;

    if (encoded <= 1) {
        decoded = 256 + encoded;
    } else if (encoded <= 255) {
        decoded = encoded;
    } else if (encoded <= 509) {
        decoded = encoded - 512;
    } else {
        decoded = encoded - 768;
    }

    gen_arith_imm(env, ctx, OPC_ADDIU, 29, 29, decoded << 2);
}
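/* Example decode: an encoded field of 3 leaves decoded == 3, so the stack
   pointer is adjusted by 3 << 2 = 12 bytes, while an encoded 510 gives
   decoded == -258, i.e. an adjustment of -1032 bytes. */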
static void gen_addius5 (CPUMIPSState *env, DisasContext *ctx)
{
    int imm = SIMM(ctx->opcode, 1, 4);
    int rd = (ctx->opcode >> 5) & 0x1f;

    gen_arith_imm(env, ctx, OPC_ADDIU, rd, rd, imm);
}
static void gen_andi16 (CPUMIPSState *env, DisasContext *ctx)
{
    static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
                                       31, 32, 63, 64, 255, 32768, 65535 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    int encoded = ZIMM(ctx->opcode, 0, 4);

    gen_logic_imm(env, OPC_ANDI, rd, rs, decoded_imm[encoded]);
}
static void gen_ldst_multiple (DisasContext *ctx, uint32_t opc, int reglist,
                               int base, int16_t offset)
{
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        generate_exception(ctx, EXCP_RI);
    }

    t0 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

    t1 = tcg_const_tl(reglist);
    t2 = tcg_const_i32(ctx->mem_idx);

    save_cpu_state(ctx, 1);
    gen_helper_lwm(t0, t1, t2);
    gen_helper_swm(t0, t1, t2);
#ifdef TARGET_MIPS64
    gen_helper_ldm(t0, t1, t2);
    gen_helper_sdm(t0, t1, t2);

    MIPS_DEBUG("%s, %x, %d(%s)", opn, reglist, offset, regnames[base]);
    tcg_temp_free_i32(t2);
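/* LWM/SWM (and LDM/SDM on 64-bit targets) are implemented entirely in
   helpers: the encoded register list and the current memory index are
   passed to the helper as constants so it can loop over the registers at
   run time. */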
static void gen_pool16c_insn (CPUMIPSState *env, DisasContext *ctx, int *is_branch)
{
    int rd = mmreg((ctx->opcode >> 3) & 0x7);
    int rs = mmreg(ctx->opcode & 0x7);

    switch (((ctx->opcode) >> 4) & 0x3f) {
    gen_logic(env, OPC_NOR, rd, rs, 0);
    gen_logic(env, OPC_XOR, rd, rd, rs);
    gen_logic(env, OPC_AND, rd, rd, rs);
    gen_logic(env, OPC_OR, rd, rd, rs);

    static const int lwm_convert[] = { 0x11, 0x12, 0x13, 0x14 };
    int offset = ZIMM(ctx->opcode, 0, 4);

    gen_ldst_multiple(ctx, LWM32, lwm_convert[(ctx->opcode >> 4) & 0x3],
                      29, offset << 2);

    static const int swm_convert[] = { 0x11, 0x12, 0x13, 0x14 };
    int offset = ZIMM(ctx->opcode, 0, 4);

    gen_ldst_multiple(ctx, SWM32, swm_convert[(ctx->opcode >> 4) & 0x3],
                      29, offset << 2);

    int reg = ctx->opcode & 0x1f;

    gen_compute_branch(ctx, OPC_JR, 2, reg, 0, 0);

    int reg = ctx->opcode & 0x1f;

    gen_compute_branch(ctx, OPC_JR, 2, reg, 0, 0);
    /* Let normal delay slot handling in our caller take us
       to the branch target. */

    int reg = ctx->opcode & 0x1f;

    gen_compute_branch(ctx, opc, 2, reg, 31, 0);

    gen_HILO(ctx, OPC_MFHI, uMIPS_RS5(ctx->opcode));
    gen_HILO(ctx, OPC_MFLO, uMIPS_RS5(ctx->opcode));
    generate_exception(ctx, EXCP_BREAK);
    /* XXX: not clear which exception should be raised
     *      when in debug mode... */
    check_insn(env, ctx, ISA_MIPS32);
    if (!(ctx->hflags & MIPS_HFLAG_DM)) {
        generate_exception(ctx, EXCP_DBp);
    }
    generate_exception(ctx, EXCP_DBp);

    int imm = ZIMM(ctx->opcode, 0, 5);

    gen_compute_branch(ctx, OPC_JR, 2, 31, 0, 0);
    gen_arith_imm(env, ctx, OPC_ADDIU, 29, 29, imm << 2);
    /* Let normal delay slot handling in our caller take us
       to the branch target. */

    generate_exception(ctx, EXCP_RI);
static void gen_ldxs (DisasContext *ctx, int base, int index, int rd)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t0, base);
    gen_load_gpr(t1, index);
    tcg_gen_shli_tl(t1, t1, 2);
    gen_op_addr_add(ctx, t0, t1, t0);

    save_cpu_state(ctx, 0);
    op_ld_lw(t1, t0, ctx);
    gen_store_gpr(t1, rd);
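/* LWXS: the word is loaded from base + (index << 2), i.e. the index
   register is scaled by the word size before being added to the base. */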
static void gen_ldst_pair (DisasContext *ctx, uint32_t opc, int rd,
                           int base, int16_t offset)
{
    const char *opn = "ldst_pair";

    if (ctx->hflags & MIPS_HFLAG_BMASK || rd == 31 || rd == base) {
        generate_exception(ctx, EXCP_RI);
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);

    save_cpu_state(ctx, 0);
    op_ld_lw(t1, t0, ctx);
    gen_store_gpr(t1, rd);
    tcg_gen_movi_tl(t1, 4);
    gen_op_addr_add(ctx, t0, t0, t1);
    op_ld_lw(t1, t0, ctx);
    gen_store_gpr(t1, rd+1);

    save_cpu_state(ctx, 0);
    gen_load_gpr(t1, rd);
    op_st_sw(t1, t0, ctx);
    tcg_gen_movi_tl(t1, 4);
    gen_op_addr_add(ctx, t0, t0, t1);
    gen_load_gpr(t1, rd+1);
    op_st_sw(t1, t0, ctx);
#ifdef TARGET_MIPS64
    save_cpu_state(ctx, 0);
    op_ld_ld(t1, t0, ctx);
    gen_store_gpr(t1, rd);
    tcg_gen_movi_tl(t1, 8);
    gen_op_addr_add(ctx, t0, t0, t1);
    op_ld_ld(t1, t0, ctx);
    gen_store_gpr(t1, rd+1);

    save_cpu_state(ctx, 0);
    gen_load_gpr(t1, rd);
    op_st_sd(t1, t0, ctx);
    tcg_gen_movi_tl(t1, 8);
    gen_op_addr_add(ctx, t0, t0, t1);
    gen_load_gpr(t1, rd+1);
    op_st_sd(t1, t0, ctx);

    (void)opn; /* avoid a compiler warning */
    MIPS_DEBUG("%s, %s, %d(%s)", opn, regnames[rd], offset, regnames[base]);
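/* LWP/SWP access two consecutive GPRs, rd and rd+1, at offset and
   offset+4 (LDP/SDP use offset and offset+8 on 64-bit targets); rd == 31,
   rd == base, or use in a branch delay slot raises a Reserved Instruction
   exception. */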
static void gen_pool32axf (CPUMIPSState *env, DisasContext *ctx, int rt, int rs,
                           int *is_branch)
{
    int extension = (ctx->opcode >> 6) & 0x3f;
    int minor = (ctx->opcode >> 12) & 0xf;
    uint32_t mips32_op;

    switch (extension) {
        mips32_op = OPC_TEQ;
        mips32_op = OPC_TGE;
        mips32_op = OPC_TGEU;
        mips32_op = OPC_TLT;
        mips32_op = OPC_TLTU;
        mips32_op = OPC_TNE;
        gen_trap(ctx, mips32_op, rs, rt, -1);
#ifndef CONFIG_USER_ONLY
        /* Treat as NOP. */
        gen_mfc0(env, ctx, cpu_gpr[rt], rs, (ctx->opcode >> 11) & 0x7);
        TCGv t0 = tcg_temp_new();

        gen_load_gpr(t0, rt);
        gen_mtc0(env, ctx, t0, rs, (ctx->opcode >> 11) & 0x7);
        gen_bshfl(ctx, OPC_SEB, rs, rt);
        gen_bshfl(ctx, OPC_SEH, rs, rt);
        mips32_op = OPC_CLO;
        mips32_op = OPC_CLZ;
        check_insn(env, ctx, ISA_MIPS32);
        gen_cl(ctx, mips32_op, rt, rs);
        gen_rdhwr(env, ctx, rt, rs);
        gen_bshfl(ctx, OPC_WSBH, rs, rt);
        mips32_op = OPC_MULT;
        mips32_op = OPC_MULTU;
        mips32_op = OPC_DIV;
        mips32_op = OPC_DIVU;
        mips32_op = OPC_MADD;
        mips32_op = OPC_MADDU;
        mips32_op = OPC_MSUB;
        mips32_op = OPC_MSUBU;
        check_insn(env, ctx, ISA_MIPS32);
        gen_muldiv(ctx, mips32_op, rs, rt);
        goto pool32axf_invalid;
        generate_exception_err(ctx, EXCP_CpU, 2);
        goto pool32axf_invalid;
        gen_compute_branch (ctx, OPC_JALR, 4, rs, rt, 0);
        gen_compute_branch (ctx, OPC_JALRS, 4, rs, rt, 0);
        goto pool32axf_invalid;
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_load_srsgpr(rt, rs);
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_store_srsgpr(rt, rs);
        goto pool32axf_invalid;
#ifndef CONFIG_USER_ONLY
        mips32_op = OPC_TLBP;
        mips32_op = OPC_TLBR;
        mips32_op = OPC_TLBWI;
        mips32_op = OPC_TLBWR;
        mips32_op = OPC_WAIT;
        mips32_op = OPC_DERET;
        mips32_op = OPC_ERET;
        gen_cp0(env, ctx, mips32_op, rt, rs);
        goto pool32axf_invalid;
        TCGv t0 = tcg_temp_new();

        save_cpu_state(ctx, 1);
        gen_store_gpr(t0, rs);
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        TCGv t0 = tcg_temp_new();

        save_cpu_state(ctx, 1);
        gen_store_gpr(t0, rs);
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        goto pool32axf_invalid;
        generate_exception(ctx, EXCP_SYSCALL);
        ctx->bstate = BS_STOP;
        check_insn(env, ctx, ISA_MIPS32);
        if (!(ctx->hflags & MIPS_HFLAG_DM)) {
            generate_exception(ctx, EXCP_DBp);
        }
        generate_exception(ctx, EXCP_DBp);
        goto pool32axf_invalid;
        gen_HILO(ctx, OPC_MFHI, rs);
        gen_HILO(ctx, OPC_MFLO, rs);
        gen_HILO(ctx, OPC_MTHI, rs);
        gen_HILO(ctx, OPC_MTLO, rs);
        goto pool32axf_invalid;
        MIPS_INVAL("pool32axf");
        generate_exception(ctx, EXCP_RI);
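/* Both the 6-bit 'extension' field (bits 11..6) and the 4-bit 'minor'
   field (bits 15..12) are extracted up front; which of the two selects the
   operation depends on the sub-pool being decoded. */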
/* Values for microMIPS fmt field.  Variable-width, depending on which
   formats the instruction supports. */

static void gen_pool32fxf (CPUMIPSState *env, DisasContext *ctx, int rt, int rs)
{
    int extension = (ctx->opcode >> 6) & 0x3ff;
    uint32_t mips32_op;

#define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
#define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
#define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc

    switch (extension) {
    case FLOAT_1BIT_FMT(CFC1, 0):
        mips32_op = OPC_CFC1;
    case FLOAT_1BIT_FMT(CTC1, 0):
        mips32_op = OPC_CTC1;
    case FLOAT_1BIT_FMT(MFC1, 0):
        mips32_op = OPC_MFC1;
    case FLOAT_1BIT_FMT(MTC1, 0):
        mips32_op = OPC_MTC1;
    case FLOAT_1BIT_FMT(MFHC1, 0):
        mips32_op = OPC_MFHC1;
    case FLOAT_1BIT_FMT(MTHC1, 0):
        mips32_op = OPC_MTHC1;
        gen_cp1(ctx, mips32_op, rt, rs);

    /* Reciprocal square root */
    case FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_S):
        mips32_op = OPC_RSQRT_S;
    case FLOAT_1BIT_FMT(RSQRT_FMT, FMT_SD_D):
        mips32_op = OPC_RSQRT_D;
    case FLOAT_1BIT_FMT(SQRT_FMT, FMT_SD_S):
        mips32_op = OPC_SQRT_S;
    case FLOAT_1BIT_FMT(SQRT_FMT, FMT_SD_D):
        mips32_op = OPC_SQRT_D;
    case FLOAT_1BIT_FMT(RECIP_FMT, FMT_SD_S):
        mips32_op = OPC_RECIP_S;
    case FLOAT_1BIT_FMT(RECIP_FMT, FMT_SD_D):
        mips32_op = OPC_RECIP_D;
    case FLOAT_1BIT_FMT(FLOOR_L, FMT_SD_S):
        mips32_op = OPC_FLOOR_L_S;
    case FLOAT_1BIT_FMT(FLOOR_L, FMT_SD_D):
        mips32_op = OPC_FLOOR_L_D;
    case FLOAT_1BIT_FMT(FLOOR_W, FMT_SD_S):
        mips32_op = OPC_FLOOR_W_S;
    case FLOAT_1BIT_FMT(FLOOR_W, FMT_SD_D):
        mips32_op = OPC_FLOOR_W_D;
    case FLOAT_1BIT_FMT(CEIL_L, FMT_SD_S):
        mips32_op = OPC_CEIL_L_S;
    case FLOAT_1BIT_FMT(CEIL_L, FMT_SD_D):
        mips32_op = OPC_CEIL_L_D;
    case FLOAT_1BIT_FMT(CEIL_W, FMT_SD_S):
        mips32_op = OPC_CEIL_W_S;
    case FLOAT_1BIT_FMT(CEIL_W, FMT_SD_D):
        mips32_op = OPC_CEIL_W_D;
    case FLOAT_1BIT_FMT(TRUNC_L, FMT_SD_S):
        mips32_op = OPC_TRUNC_L_S;
    case FLOAT_1BIT_FMT(TRUNC_L, FMT_SD_D):
        mips32_op = OPC_TRUNC_L_D;
    case FLOAT_1BIT_FMT(TRUNC_W, FMT_SD_S):
        mips32_op = OPC_TRUNC_W_S;
    case FLOAT_1BIT_FMT(TRUNC_W, FMT_SD_D):
        mips32_op = OPC_TRUNC_W_D;
    case FLOAT_1BIT_FMT(ROUND_L, FMT_SD_S):
        mips32_op = OPC_ROUND_L_S;
    case FLOAT_1BIT_FMT(ROUND_L, FMT_SD_D):
        mips32_op = OPC_ROUND_L_D;
    case FLOAT_1BIT_FMT(ROUND_W, FMT_SD_S):
        mips32_op = OPC_ROUND_W_S;
    case FLOAT_1BIT_FMT(ROUND_W, FMT_SD_D):
        mips32_op = OPC_ROUND_W_D;

    /* Integer to floating-point conversion */
    case FLOAT_1BIT_FMT(CVT_L, FMT_SD_S):
        mips32_op = OPC_CVT_L_S;
    case FLOAT_1BIT_FMT(CVT_L, FMT_SD_D):
        mips32_op = OPC_CVT_L_D;
    case FLOAT_1BIT_FMT(CVT_W, FMT_SD_S):
        mips32_op = OPC_CVT_W_S;
    case FLOAT_1BIT_FMT(CVT_W, FMT_SD_D):
        mips32_op = OPC_CVT_W_D;

    /* Paired-foo conversions */
    case FLOAT_1BIT_FMT(CVT_S_PL, 0):
        mips32_op = OPC_CVT_S_PL;
    case FLOAT_1BIT_FMT(CVT_S_PU, 0):
        mips32_op = OPC_CVT_S_PU;
    case FLOAT_1BIT_FMT(CVT_PW_PS, 0):
        mips32_op = OPC_CVT_PW_PS;
    case FLOAT_1BIT_FMT(CVT_PS_PW, 0):
        mips32_op = OPC_CVT_PS_PW;

    /* Floating-point moves */
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_S):
        mips32_op = OPC_MOV_S;
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_D):
        mips32_op = OPC_MOV_D;
    case FLOAT_2BIT_FMT(MOV_FMT, FMT_SDPS_PS):
        mips32_op = OPC_MOV_PS;

    /* Absolute value */
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_S):
        mips32_op = OPC_ABS_S;
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_D):
        mips32_op = OPC_ABS_D;
    case FLOAT_2BIT_FMT(ABS_FMT, FMT_SDPS_PS):
        mips32_op = OPC_ABS_PS;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_S):
        mips32_op = OPC_NEG_S;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_D):
        mips32_op = OPC_NEG_D;
    case FLOAT_2BIT_FMT(NEG_FMT, FMT_SDPS_PS):
        mips32_op = OPC_NEG_PS;

    /* Reciprocal square root step */
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_S):
        mips32_op = OPC_RSQRT1_S;
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_D):
        mips32_op = OPC_RSQRT1_D;
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_PS):
        mips32_op = OPC_RSQRT1_PS;

    /* Reciprocal step */
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_S):
        mips32_op = OPC_RECIP1_S;
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_D):
        mips32_op = OPC_RECIP1_D;
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_PS):
        mips32_op = OPC_RECIP1_PS;

    /* Conversions from double */
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_S):
        mips32_op = OPC_CVT_D_S;
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_W):
        mips32_op = OPC_CVT_D_W;
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_L):
        mips32_op = OPC_CVT_D_L;

    /* Conversions from single */
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_D):
        mips32_op = OPC_CVT_S_D;
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_W):
        mips32_op = OPC_CVT_S_W;
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_L):
        mips32_op = OPC_CVT_S_L;
        gen_farith(ctx, mips32_op, -1, rs, rt, 0);

    /* Conditional moves on floating-point codes */
    case COND_FLOAT_MOV(MOVT, 0):
    case COND_FLOAT_MOV(MOVT, 1):
    case COND_FLOAT_MOV(MOVT, 2):
    case COND_FLOAT_MOV(MOVT, 3):
    case COND_FLOAT_MOV(MOVT, 4):
    case COND_FLOAT_MOV(MOVT, 5):
    case COND_FLOAT_MOV(MOVT, 6):
    case COND_FLOAT_MOV(MOVT, 7):
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 1);
    case COND_FLOAT_MOV(MOVF, 0):
    case COND_FLOAT_MOV(MOVF, 1):
    case COND_FLOAT_MOV(MOVF, 2):
    case COND_FLOAT_MOV(MOVF, 3):
    case COND_FLOAT_MOV(MOVF, 4):
    case COND_FLOAT_MOV(MOVF, 5):
    case COND_FLOAT_MOV(MOVF, 6):
    case COND_FLOAT_MOV(MOVF, 7):
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 0);
        MIPS_INVAL("pool32fxf");
        generate_exception(ctx, EXCP_RI);
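/* The FLOAT_1BIT_FMT/FLOAT_2BIT_FMT/COND_FLOAT_MOV macros pack the fmt
   (or condition-code) bits above the minor opcode so that each
   opcode/format combination can be matched with a single case label. */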
static void decode_micromips32_opc (CPUMIPSState *env, DisasContext *ctx,
                                    uint16_t insn_hw1, int *is_branch)
{
    int rt, rs, rd, rr;
    uint32_t op, minor, mips32_op;
    uint32_t cond, fmt, cc;

    insn = lduw_code(ctx->pc + 2);
    ctx->opcode = (ctx->opcode << 16) | insn;

    rt = (ctx->opcode >> 21) & 0x1f;
    rs = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    rr = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t) ctx->opcode;

    op = (ctx->opcode >> 26) & 0x3f;
    minor = ctx->opcode & 0x3f;
    minor = (ctx->opcode >> 6) & 0xf;
    mips32_op = OPC_SLL;
    mips32_op = OPC_SRA;
    mips32_op = OPC_SRL;
    mips32_op = OPC_ROTR;
    gen_shift_imm(env, ctx, mips32_op, rt, rs, rd);
    goto pool32a_invalid;
    minor = (ctx->opcode >> 6) & 0xf;
    mips32_op = OPC_ADD;
    mips32_op = OPC_ADDU;
    mips32_op = OPC_SUB;
    mips32_op = OPC_SUBU;
    mips32_op = OPC_MUL;
    gen_arith(env, ctx, mips32_op, rd, rs, rt);
    mips32_op = OPC_SLLV;
    mips32_op = OPC_SRLV;
    mips32_op = OPC_SRAV;
    mips32_op = OPC_ROTRV;
    gen_shift(env, ctx, mips32_op, rd, rs, rt);
    /* Logical operations */
    mips32_op = OPC_AND;
    mips32_op = OPC_OR;
    mips32_op = OPC_NOR;
    mips32_op = OPC_XOR;
    gen_logic(env, mips32_op, rd, rs, rt);
    /* Set less than */
    mips32_op = OPC_SLT;
    mips32_op = OPC_SLTU;
    gen_slt(env, mips32_op, rd, rs, rt);
    goto pool32a_invalid;
    minor = (ctx->opcode >> 6) & 0xf;
    /* Conditional moves */
    mips32_op = OPC_MOVN;
    mips32_op = OPC_MOVZ;
    gen_cond_move(env, mips32_op, rd, rs, rt);
    gen_ldxs(ctx, rs, rt, rd);
    goto pool32a_invalid;
    gen_bitops(ctx, OPC_INS, rt, rs, rr, rd);
    gen_bitops(ctx, OPC_EXT, rt, rs, rr, rd);
    gen_pool32axf(env, ctx, rt, rs, is_branch);
    generate_exception(ctx, EXCP_BREAK);
    MIPS_INVAL("pool32a");
    generate_exception(ctx, EXCP_RI);
    minor = (ctx->opcode >> 12) & 0xf;
    /* Treat as no-op. */
    /* COP2: Not implemented. */
    generate_exception_err(ctx, EXCP_CpU, 2);
#ifdef TARGET_MIPS64
    gen_ldst_pair(ctx, minor, rt, rs, SIMM(ctx->opcode, 0, 12));
#ifdef TARGET_MIPS64
    gen_ldst_multiple(ctx, minor, rt, rs, SIMM(ctx->opcode, 0, 12));
    MIPS_INVAL("pool32b");
    generate_exception(ctx, EXCP_RI);
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        minor = ctx->opcode & 0x3f;
        check_cp1_enabled(ctx);
        mips32_op = OPC_ALNV_PS;
        mips32_op = OPC_MADD_S;
        mips32_op = OPC_MADD_D;
        mips32_op = OPC_MADD_PS;
        mips32_op = OPC_MSUB_S;
        mips32_op = OPC_MSUB_D;
        mips32_op = OPC_MSUB_PS;
        mips32_op = OPC_NMADD_S;
        mips32_op = OPC_NMADD_D;
        mips32_op = OPC_NMADD_PS;
        mips32_op = OPC_NMSUB_S;
        mips32_op = OPC_NMSUB_D;
        mips32_op = OPC_NMSUB_PS;
        gen_flt3_arith(ctx, mips32_op, rd, rr, rs, rt);
    case CABS_COND_FMT:
        cond = (ctx->opcode >> 6) & 0xf;
        cc = (ctx->opcode >> 13) & 0x7;
        fmt = (ctx->opcode >> 10) & 0x3;
        gen_cmpabs_s(ctx, cond, rt, rs, cc);
        gen_cmpabs_d(ctx, cond, rt, rs, cc);
        gen_cmpabs_ps(ctx, cond, rt, rs, cc);
        goto pool32f_invalid;
        cond = (ctx->opcode >> 6) & 0xf;
        cc = (ctx->opcode >> 13) & 0x7;
        fmt = (ctx->opcode >> 10) & 0x3;
        gen_cmp_s(ctx, cond, rt, rs, cc);
        gen_cmp_d(ctx, cond, rt, rs, cc);
        gen_cmp_ps(ctx, cond, rt, rs, cc);
        goto pool32f_invalid;
        gen_pool32fxf(env, ctx, rt, rs);
        switch ((ctx->opcode >> 6) & 0x7) {
        mips32_op = OPC_PLL_PS;
        mips32_op = OPC_PLU_PS;
        mips32_op = OPC_PUL_PS;
        mips32_op = OPC_PUU_PS;
        mips32_op = OPC_CVT_PS_S;
        gen_farith(ctx, mips32_op, rt, rs, rd, 0);
        goto pool32f_invalid;
        switch ((ctx->opcode >> 6) & 0x7) {
        mips32_op = OPC_LWXC1;
        mips32_op = OPC_SWXC1;
        mips32_op = OPC_LDXC1;
        mips32_op = OPC_SDXC1;
        mips32_op = OPC_LUXC1;
        mips32_op = OPC_SUXC1;
        gen_flt3_ldst(ctx, mips32_op, rd, rd, rt, rs);
        goto pool32f_invalid;
        fmt = (ctx->opcode >> 9) & 0x3;
        switch ((ctx->opcode >> 6) & 0x7) {
        mips32_op = OPC_RSQRT2_S;
        mips32_op = OPC_RSQRT2_D;
        mips32_op = OPC_RSQRT2_PS;
        goto pool32f_invalid;
        mips32_op = OPC_RECIP2_S;
        mips32_op = OPC_RECIP2_D;
        mips32_op = OPC_RECIP2_PS;
        goto pool32f_invalid;
        mips32_op = OPC_ADDR_PS;
        mips32_op = OPC_MULR_PS;
        gen_farith(ctx, mips32_op, rt, rs, rd, 0);
        goto pool32f_invalid;
        /* MOV[FT].fmt and PREFX */
        cc = (ctx->opcode >> 13) & 0x7;
        fmt = (ctx->opcode >> 9) & 0x3;
        switch ((ctx->opcode >> 6) & 0x7) {
        gen_movcf_s(rs, rt, cc, 0);
        gen_movcf_d(ctx, rs, rt, cc, 0);
        gen_movcf_ps(rs, rt, cc, 0);
        goto pool32f_invalid;
        gen_movcf_s(rs, rt, cc, 1);
        gen_movcf_d(ctx, rs, rt, cc, 1);
        gen_movcf_ps(rs, rt, cc, 1);
        goto pool32f_invalid;
        goto pool32f_invalid;
#define FINSN_3ARG_SDPS(prfx)                           \
        switch ((ctx->opcode >> 8) & 0x3) {             \
            mips32_op = OPC_##prfx##_S;                 \
            mips32_op = OPC_##prfx##_D;                 \
        case FMT_SDPS_PS:                               \
            mips32_op = OPC_##prfx##_PS;                \
            goto pool32f_invalid;                       \
        /* regular FP ops */
        switch ((ctx->opcode >> 6) & 0x3) {
        FINSN_3ARG_SDPS(ADD);
        FINSN_3ARG_SDPS(SUB);
        FINSN_3ARG_SDPS(MUL);
        fmt = (ctx->opcode >> 8) & 0x3;
        mips32_op = OPC_DIV_D;
        } else if (fmt == 0) {
        mips32_op = OPC_DIV_S;
        goto pool32f_invalid;
        goto pool32f_invalid;
        switch ((ctx->opcode >> 6) & 0x3) {
        FINSN_3ARG_SDPS(MOVN);
        FINSN_3ARG_SDPS(MOVZ);
        goto pool32f_invalid;
        gen_farith(ctx, mips32_op, rt, rs, rd, 0);
        MIPS_INVAL("pool32f");
        generate_exception(ctx, EXCP_RI);
        generate_exception_err(ctx, EXCP_CpU, 1);
    minor = (ctx->opcode >> 21) & 0x1f;
    mips32_op = OPC_BLTZ;
    mips32_op = OPC_BLTZAL;
    mips32_op = OPC_BLTZALS;
    mips32_op = OPC_BGEZ;
    mips32_op = OPC_BGEZAL;
    mips32_op = OPC_BGEZALS;
    mips32_op = OPC_BLEZ;
    mips32_op = OPC_BGTZ;
    gen_compute_branch(ctx, mips32_op, 4, rs, -1, imm << 1);
    mips32_op = OPC_TLTI;
    mips32_op = OPC_TGEI;
    mips32_op = OPC_TLTIU;
    mips32_op = OPC_TGEIU;
    mips32_op = OPC_TNEI;
    mips32_op = OPC_TEQI;
    gen_trap(ctx, mips32_op, rs, -1, imm);
    gen_compute_branch(ctx, minor == BNEZC ? OPC_BNE : OPC_BEQ,
                       4, rs, 0, imm << 1);
    /* Compact branches don't have a delay slot, so just let
       the normal delay slot handling take us to the branch
       target. */
    gen_logic_imm(env, OPC_LUI, rs, -1, imm);
    /* COP2: Not implemented. */
    generate_exception_err(ctx, EXCP_CpU, 2);
    mips32_op = (ctx->opcode & (1 << 16)) ? OPC_BC1FANY2 : OPC_BC1F;
    mips32_op = (ctx->opcode & (1 << 16)) ? OPC_BC1TANY2 : OPC_BC1T;
    mips32_op = OPC_BC1FANY4;
    mips32_op = OPC_BC1TANY4;
    check_insn(env, ctx, ASE_MIPS3D);
    gen_compute_branch1(env, ctx, mips32_op,
                        (ctx->opcode >> 18) & 0x7, imm << 1);
    /* MIPS DSP: not implemented */
    MIPS_INVAL("pool32i");
    generate_exception(ctx, EXCP_RI);
    minor = (ctx->opcode >> 12) & 0xf;
    mips32_op = OPC_LWL;
    mips32_op = OPC_SWL;
    mips32_op = OPC_LWR;
    mips32_op = OPC_SWR;
#if defined(TARGET_MIPS64)
    mips32_op = OPC_LDL;
    mips32_op = OPC_SDL;
    mips32_op = OPC_LDR;
    mips32_op = OPC_SDR;
    mips32_op = OPC_LWU;
    mips32_op = OPC_LLD;
    mips32_op = OPC_LL;
    gen_ld(env, ctx, mips32_op, rt, rs, SIMM(ctx->opcode, 0, 12));
    gen_st(ctx, mips32_op, rt, rs, SIMM(ctx->opcode, 0, 12));
    gen_st_cond(ctx, OPC_SC, rt, rs, SIMM(ctx->opcode, 0, 12));
#if defined(TARGET_MIPS64)
    gen_st_cond(ctx, OPC_SCD, rt, rs, SIMM(ctx->opcode, 0, 12));
    /* Treat as no-op */
    MIPS_INVAL("pool32c");
    generate_exception(ctx, EXCP_RI);
    mips32_op = OPC_ADDI;
    mips32_op = OPC_ADDIU;
    gen_arith_imm(env, ctx, mips32_op, rt, rs, imm);
    /* Logical operations */
    mips32_op = OPC_ORI;
    mips32_op = OPC_XORI;
    mips32_op = OPC_ANDI;
    gen_logic_imm(env, mips32_op, rt, rs, imm);
    /* Set less than immediate */
    mips32_op = OPC_SLTI;
    mips32_op = OPC_SLTIU;
    gen_slt_imm(env, mips32_op, rt, rs, imm);
    offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
    gen_compute_branch(ctx, OPC_JALX, 4, rt, rs, offset);
    offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 1;
    gen_compute_branch(ctx, OPC_JALS, 4, rt, rs, offset);
    gen_compute_branch(ctx, OPC_BEQ, 4, rt, rs, imm << 1);
    gen_compute_branch(ctx, OPC_BNE, 4, rt, rs, imm << 1);
    gen_compute_branch(ctx, OPC_J, 4, rt, rs,
                       (int32_t)(ctx->opcode & 0x3FFFFFF) << 1);
    gen_compute_branch(ctx, OPC_JAL, 4, rt, rs,
                       (int32_t)(ctx->opcode & 0x3FFFFFF) << 1);
    /* Floating point (COP1) */
    mips32_op = OPC_LWC1;
    mips32_op = OPC_LDC1;
    mips32_op = OPC_SWC1;
    mips32_op = OPC_SDC1;
    gen_cop1_ldst(env, ctx, mips32_op, rt, rs, imm);
    int reg = mmreg(ZIMM(ctx->opcode, 23, 3));
    int offset = SIMM(ctx->opcode, 0, 23) << 2;

    gen_addiupc(ctx, reg, offset, 0, 0);
    /* Loads and stores */
    mips32_op = OPC_LB;
    mips32_op = OPC_LBU;
    mips32_op = OPC_LH;
    mips32_op = OPC_LHU;
    mips32_op = OPC_LW;
#ifdef TARGET_MIPS64
    mips32_op = OPC_LD;
    mips32_op = OPC_SD;
    mips32_op = OPC_SB;
    mips32_op = OPC_SH;
    mips32_op = OPC_SW;
    gen_ld(env, ctx, mips32_op, rt, rs, imm);
    gen_st(ctx, mips32_op, rt, rs, imm);
    generate_exception(ctx, EXCP_RI);
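/* Everything decoded above is a 32-bit microMIPS encoding: the second
   halfword is fetched from ctx->pc + 2 and merged into ctx->opcode before
   the major opcode in bits 31..26 is examined. */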
static int decode_micromips_opc (CPUMIPSState *env, DisasContext *ctx, int *is_branch)
{
    /* make sure instructions are on a halfword boundary */
    if (ctx->pc & 0x1) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception(ctx, EXCP_AdEL);
        ctx->bstate = BS_STOP;
    }

    op = (ctx->opcode >> 10) & 0x3f;
    /* Enforce properly-sized instructions in a delay slot */
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int bits = ctx->hflags & MIPS_HFLAG_BMASK_EXT;
    case POOL48A:               /* ??? */
        if (bits & MIPS_HFLAG_BDS16) {
            generate_exception(ctx, EXCP_RI);
            /* Just stop translation; the user is confused. */
            ctx->bstate = BS_STOP;
        if (bits & MIPS_HFLAG_BDS32) {
            generate_exception(ctx, EXCP_RI);
            /* Just stop translation; the user is confused. */
            ctx->bstate = BS_STOP;

    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs1 = mmreg(uMIPS_RS1(ctx->opcode));
    int rs2 = mmreg(uMIPS_RS2(ctx->opcode));

    switch (ctx->opcode & 0x1) {
    gen_arith(env, ctx, opc, rd, rs1, rs2);

    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    int amount = (ctx->opcode >> 1) & 0x7;

    amount = amount == 0 ? 8 : amount;

    switch (ctx->opcode & 0x1) {
    gen_shift_imm(env, ctx, opc, rd, rs, amount);

    gen_pool16c_insn(env, ctx, is_branch);

    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rb = 28;            /* GP */
    int16_t offset = SIMM(ctx->opcode, 0, 7) << 2;

    gen_ld(env, ctx, OPC_LW, rd, rb, offset);

    if (ctx->opcode & 1) {
        generate_exception(ctx, EXCP_RI);

    int enc_dest = uMIPS_RD(ctx->opcode);
    int enc_rt = uMIPS_RS2(ctx->opcode);
    int enc_rs = uMIPS_RS1(ctx->opcode);
    int rd, rs, re, rt;
    static const int rd_enc[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
    static const int re_enc[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
    static const int rs_rt_enc[] = { 0, 17, 2, 3, 16, 18, 19, 20 };

    rd = rd_enc[enc_dest];
    re = re_enc[enc_dest];
    rs = rs_rt_enc[enc_rs];
    rt = rs_rt_enc[enc_rt];

    gen_arith_imm(env, ctx, OPC_ADDIU, rd, rs, 0);
    gen_arith_imm(env, ctx, OPC_ADDIU, re, rt, 0);

    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rb = mmreg(uMIPS_RS(ctx->opcode));
    int16_t offset = ZIMM(ctx->opcode, 0, 4);
    offset = (offset == 0xf ? -1 : offset);

    gen_ld(env, ctx, OPC_LBU, rd, rb, offset);

    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rb = mmreg(uMIPS_RS(ctx->opcode));
    int16_t offset = ZIMM(ctx->opcode, 0, 4) << 1;

    gen_ld(env, ctx, OPC_LHU, rd, rb, offset);

    int rd = (ctx->opcode >> 5) & 0x1f;
    int rb = 29;            /* SP */
    int16_t offset = ZIMM(ctx->opcode, 0, 5) << 2;

    gen_ld(env, ctx, OPC_LW, rd, rb, offset);

    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rb = mmreg(uMIPS_RS(ctx->opcode));
    int16_t offset = ZIMM(ctx->opcode, 0, 4) << 2;

    gen_ld(env, ctx, OPC_LW, rd, rb, offset);

    int rd = mmreg2(uMIPS_RD(ctx->opcode));
    int rb = mmreg(uMIPS_RS(ctx->opcode));
    int16_t offset = ZIMM(ctx->opcode, 0, 4);

    gen_st(ctx, OPC_SB, rd, rb, offset);

    int rd = mmreg2(uMIPS_RD(ctx->opcode));
    int rb = mmreg(uMIPS_RS(ctx->opcode));
    int16_t offset = ZIMM(ctx->opcode, 0, 4) << 1;

    gen_st(ctx, OPC_SH, rd, rb, offset);

    int rd = (ctx->opcode >> 5) & 0x1f;
    int rb = 29;            /* SP */
    int16_t offset = ZIMM(ctx->opcode, 0, 5) << 2;

    gen_st(ctx, OPC_SW, rd, rb, offset);

    int rd = mmreg2(uMIPS_RD(ctx->opcode));
    int rb = mmreg(uMIPS_RS(ctx->opcode));
    int16_t offset = ZIMM(ctx->opcode, 0, 4) << 2;

    gen_st(ctx, OPC_SW, rd, rb, offset);

    int rd = uMIPS_RD5(ctx->opcode);
    int rs = uMIPS_RS5(ctx->opcode);

    gen_arith_imm(env, ctx, OPC_ADDIU, rd, rs, 0);

    gen_andi16(env, ctx);
    switch (ctx->opcode & 0x1) {
        gen_addius5(env, ctx);
        gen_addiusp(env, ctx);
    switch (ctx->opcode & 0x1) {
        gen_addiur2(env, ctx);
        gen_addiur1sp(env, ctx);
    gen_compute_branch(ctx, OPC_BEQ, 2, 0, 0,
                       SIMM(ctx->opcode, 0, 10) << 1);
    gen_compute_branch(ctx, op == BNEZ16 ? OPC_BNE : OPC_BEQ, 2,
                       mmreg(uMIPS_RD(ctx->opcode)),
                       0, SIMM(ctx->opcode, 0, 7) << 1);

    int reg = mmreg(uMIPS_RD(ctx->opcode));
    int imm = ZIMM(ctx->opcode, 0, 7);

    imm = (imm == 0x7f ? -1 : imm);
    tcg_gen_movi_tl(cpu_gpr[reg], imm);

    generate_exception(ctx, EXCP_RI);
    decode_micromips32_opc (env, ctx, op, is_branch);
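/* An instruction of the wrong size in a delay slot (MIPS_HFLAG_BDS16 vs.
   MIPS_HFLAG_BDS32) is reported as a Reserved Instruction and translation
   simply stops, since the guest code is already in error. */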
/* SmartMIPS extension to MIPS32 */

#if defined(TARGET_MIPS64)

/* MDMX extension to MIPS64 */
static void decode_opc (CPUMIPSState *env, DisasContext *ctx, int *is_branch)
{
    int rs, rt, rd, sa;
    uint32_t op, op1, op2;

    /* make sure instructions are on a word boundary */
    if (ctx->pc & 0x3) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception(ctx, EXCP_AdEL);
    }

    /* Handle blikely not taken case */
    if ((ctx->hflags & MIPS_HFLAG_BMASK_BASE) == MIPS_HFLAG_BL) {
        int l1 = gen_new_label();

        MIPS_DEBUG("blikely condition (" TARGET_FMT_lx ")", ctx->pc + 4);
        tcg_gen_brcondi_tl(TCG_COND_NE, bcond, 0, l1);
        tcg_gen_movi_i32(hflags, ctx->hflags & ~MIPS_HFLAG_BMASK);
        gen_goto_tb(ctx, 1, ctx->pc + 4);
    }

    if (unlikely(qemu_loglevel_mask(CPU_LOG_TB_OP)))
        tcg_gen_debug_insn_start(ctx->pc);

    op = MASK_OP_MAJOR(ctx->opcode);
    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode;

    op1 = MASK_SPECIAL(ctx->opcode);
    case OPC_SLL:          /* Shift with immediate */
        gen_shift_imm(env, ctx, op1, rd, rt, sa);
        switch ((ctx->opcode >> 21) & 0x1f) {
        /* rotr is decoded as srl on non-R2 CPUs */
        if (env->insn_flags & ISA_MIPS32R2) {
        gen_shift_imm(env, ctx, op1, rd, rt, sa);
        generate_exception(ctx, EXCP_RI);
    case OPC_MOVN:         /* Conditional move */
        check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32 |
                   INSN_LOONGSON2E | INSN_LOONGSON2F);
        gen_cond_move(env, op1, rd, rs, rt);
    case OPC_ADD ... OPC_SUBU:
        gen_arith(env, ctx, op1, rd, rs, rt);
    case OPC_SLLV:         /* Shifts */
        gen_shift(env, ctx, op1, rd, rs, rt);
        switch ((ctx->opcode >> 6) & 0x1f) {
        /* rotrv is decoded as srlv on non-R2 CPUs */
        if (env->insn_flags & ISA_MIPS32R2) {
        gen_shift(env, ctx, op1, rd, rs, rt);
        generate_exception(ctx, EXCP_RI);
    case OPC_SLT:          /* Set on less than */
        gen_slt(env, op1, rd, rs, rt);
    case OPC_AND:          /* Logic */
        gen_logic(env, op1, rd, rs, rt);
    case OPC_MULT ... OPC_DIVU:
        check_insn(env, ctx, INSN_VR54XX);
        op1 = MASK_MUL_VR54XX(ctx->opcode);
        gen_mul_vr54xx(ctx, op1, rd, rs, rt);
        gen_muldiv(ctx, op1, rs, rt);
    case OPC_JR ... OPC_JALR:
        gen_compute_branch(ctx, op1, 4, rs, rd, sa);
    case OPC_TGE ... OPC_TEQ: /* Traps */
        gen_trap(ctx, op1, rs, rt, -1);
    case OPC_MFHI:         /* Move from HI/LO */
        gen_HILO(ctx, op1, rd);
    case OPC_MTLO:         /* Move to HI/LO */
        gen_HILO(ctx, op1, rs);
    case OPC_PMON:         /* Pmon entry point, also R4010 selsl */
#ifdef MIPS_STRICT_STANDARD
        MIPS_INVAL("PMON / selsl");
        generate_exception(ctx, EXCP_RI);
        gen_helper_0i(pmon, sa);
        generate_exception(ctx, EXCP_SYSCALL);
        ctx->bstate = BS_STOP;
        generate_exception(ctx, EXCP_BREAK);
#ifdef MIPS_STRICT_STANDARD
        MIPS_INVAL("SPIM");
        generate_exception(ctx, EXCP_RI);
        /* Implemented as RI exception for now. */
        MIPS_INVAL("spim (unofficial)");
        generate_exception(ctx, EXCP_RI);
        /* Treat as NOP. */
        check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            gen_movci(ctx, rd, rs, (ctx->opcode >> 18) & 0x7,
                      (ctx->opcode >> 16) & 1);
            generate_exception_err(ctx, EXCP_CpU, 1);
#if defined(TARGET_MIPS64)
    /* MIPS64 specific opcodes */
        check_insn(env, ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift_imm(env, ctx, op1, rd, rt, sa);
        switch ((ctx->opcode >> 21) & 0x1f) {
        /* drotr is decoded as dsrl on non-R2 CPUs */
        if (env->insn_flags & ISA_MIPS32R2) {
        check_insn(env, ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift_imm(env, ctx, op1, rd, rt, sa);
        generate_exception(ctx, EXCP_RI);
        switch ((ctx->opcode >> 21) & 0x1f) {
        /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
        if (env->insn_flags & ISA_MIPS32R2) {
        check_insn(env, ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift_imm(env, ctx, op1, rd, rt, sa);
        generate_exception(ctx, EXCP_RI);
    case OPC_DADD ... OPC_DSUBU:
        check_insn(env, ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith(env, ctx, op1, rd, rs, rt);
        check_insn(env, ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift(env, ctx, op1, rd, rs, rt);
        switch ((ctx->opcode >> 6) & 0x1f) {
        /* drotrv is decoded as dsrlv on non-R2 CPUs */
        if (env->insn_flags & ISA_MIPS32R2) {
        check_insn(env, ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_shift(env, ctx, op1, rd, rs, rt);
        generate_exception(ctx, EXCP_RI);
    case OPC_DMULT ... OPC_DDIVU:
        check_insn(env, ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_muldiv(ctx, op1, rs, rt);
    default:            /* Invalid */
        MIPS_INVAL("special");
        generate_exception(ctx, EXCP_RI);
    op1 = MASK_SPECIAL2(ctx->opcode);
    case OPC_MADD ... OPC_MADDU: /* Multiply and add/sub */
    case OPC_MSUB ... OPC_MSUBU:
        check_insn(env, ctx, ISA_MIPS32);
        gen_muldiv(ctx, op1, rs, rt);
        gen_arith(env, ctx, op1, rd, rs, rt);
        check_insn(env, ctx, ISA_MIPS32);
        gen_cl(ctx, op1, rd, rs);
        /* XXX: not clear which exception should be raised
         *      when in debug mode... */
        check_insn(env, ctx, ISA_MIPS32);
        if (!(ctx->hflags & MIPS_HFLAG_DM)) {
            generate_exception(ctx, EXCP_DBp);
            generate_exception(ctx, EXCP_DBp);
        /* Treat as NOP. */
    case OPC_DIVU_G_2F:
    case OPC_MULT_G_2F:
    case OPC_MULTU_G_2F:
    case OPC_MODU_G_2F:
        check_insn(env, ctx, INSN_LOONGSON2F);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
#if defined(TARGET_MIPS64)
        check_insn(env, ctx, ISA_MIPS64);
        check_mips_64(ctx);
        gen_cl(ctx, op1, rd, rs);
    case OPC_DMULT_G_2F:
    case OPC_DMULTU_G_2F:
    case OPC_DDIV_G_2F:
    case OPC_DDIVU_G_2F:
    case OPC_DMOD_G_2F:
    case OPC_DMODU_G_2F:
        check_insn(env, ctx, INSN_LOONGSON2F);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
    default:            /* Invalid */
        MIPS_INVAL("special2");
        generate_exception(ctx, EXCP_RI);
    op1 = MASK_SPECIAL3(ctx->opcode);
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_bitops(ctx, op1, rt, rs, sa, rd);
        check_insn(env, ctx, ISA_MIPS32R2);
        op2 = MASK_BSHFL(ctx->opcode);
        gen_bshfl(ctx, op2, rt, rd);
        gen_rdhwr(env, ctx, rt, rd);
        check_insn(env, ctx, ASE_MT);
        TCGv t0 = tcg_temp_new();
        TCGv t1 = tcg_temp_new();

        gen_load_gpr(t0, rt);
        gen_load_gpr(t1, rs);
        gen_helper_fork(t0, t1);
        check_insn(env, ctx, ASE_MT);
        TCGv t0 = tcg_temp_new();

        save_cpu_state(ctx, 1);
        gen_load_gpr(t0, rs);
        gen_helper_yield(t0, t0);
        gen_store_gpr(t0, rd);
    case OPC_DIV_G_2E ... OPC_DIVU_G_2E:
    case OPC_MULT_G_2E ... OPC_MULTU_G_2E:
    case OPC_MOD_G_2E ... OPC_MODU_G_2E:
        check_insn(env, ctx, INSN_LOONGSON2E);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
#if defined(TARGET_MIPS64)
    case OPC_DEXTM ... OPC_DEXT:
    case OPC_DINSM ... OPC_DINS:
        check_insn(env, ctx, ISA_MIPS64R2);
        check_mips_64(ctx);
        gen_bitops(ctx, op1, rt, rs, sa, rd);
        check_insn(env, ctx, ISA_MIPS64R2);
        check_mips_64(ctx);
        op2 = MASK_DBSHFL(ctx->opcode);
        gen_bshfl(ctx, op2, rt, rd);
    case OPC_DDIV_G_2E ... OPC_DDIVU_G_2E:
    case OPC_DMULT_G_2E ... OPC_DMULTU_G_2E:
    case OPC_DMOD_G_2E ... OPC_DMODU_G_2E:
        check_insn(env, ctx, INSN_LOONGSON2E);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
    default:            /* Invalid */
        MIPS_INVAL("special3");
        generate_exception(ctx, EXCP_RI);
    op1 = MASK_REGIMM(ctx->opcode);
    case OPC_BLTZ ... OPC_BGEZL: /* REGIMM branches */
    case OPC_BLTZAL ... OPC_BGEZALL:
        gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2);
    case OPC_TGEI ... OPC_TEQI: /* REGIMM traps */
        gen_trap(ctx, op1, rs, -1, imm);
        check_insn(env, ctx, ISA_MIPS32R2);
        /* Treat as NOP. */
    default:            /* Invalid */
        MIPS_INVAL("regimm");
        generate_exception(ctx, EXCP_RI);
    check_cp0_enabled(ctx);
    op1 = MASK_CP0(ctx->opcode);
#if defined(TARGET_MIPS64)
#ifndef CONFIG_USER_ONLY
        gen_cp0(env, ctx, op1, rt, rd);
#endif /* !CONFIG_USER_ONLY */
    case OPC_C0_FIRST ... OPC_C0_LAST:
#ifndef CONFIG_USER_ONLY
        gen_cp0(env, ctx, MASK_C0(ctx->opcode), rt, rd);
#endif /* !CONFIG_USER_ONLY */
#ifndef CONFIG_USER_ONLY
        TCGv t0 = tcg_temp_new();

        op2 = MASK_MFMC0(ctx->opcode);
        check_insn(env, ctx, ASE_MT);
        gen_helper_dmt(t0);
        gen_store_gpr(t0, rt);
        check_insn(env, ctx, ASE_MT);
        gen_helper_emt(t0);
        gen_store_gpr(t0, rt);
        check_insn(env, ctx, ASE_MT);
        gen_helper_dvpe(t0);
        gen_store_gpr(t0, rt);
        check_insn(env, ctx, ASE_MT);
        gen_helper_evpe(t0);
        gen_store_gpr(t0, rt);
        check_insn(env, ctx, ISA_MIPS32R2);
        save_cpu_state(ctx, 1);
        gen_store_gpr(t0, rt);
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        check_insn(env, ctx, ISA_MIPS32R2);
        save_cpu_state(ctx, 1);
        gen_store_gpr(t0, rt);
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        default:            /* Invalid */
            MIPS_INVAL("mfmc0");
            generate_exception(ctx, EXCP_RI);
#endif /* !CONFIG_USER_ONLY */
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_load_srsgpr(rt, rd);
        check_insn(env, ctx, ISA_MIPS32R2);
        gen_store_srsgpr(rt, rd);
        generate_exception(ctx, EXCP_RI);
    case OPC_ADDI: /* Arithmetic with immediate opcode */
        gen_arith_imm(env, ctx, op, rt, rs, imm);
    case OPC_SLTI: /* Set on less than with immediate opcode */
        gen_slt_imm(env, op, rt, rs, imm);
    case OPC_ANDI: /* Arithmetic with immediate opcode */
        gen_logic_imm(env, op, rt, rs, imm);
    case OPC_J ... OPC_JAL: /* Jump */
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, op, 4, rs, rt, offset);
    case OPC_BEQ ... OPC_BGTZ: /* Branch */
    case OPC_BEQL ... OPC_BGTZL:
        gen_compute_branch(ctx, op, 4, rs, rt, imm << 2);
    case OPC_LB ... OPC_LWR: /* Load and stores */
        gen_ld(env, ctx, op, rt, rs, imm);
    case OPC_SB ... OPC_SW:
        gen_st(ctx, op, rt, rs, imm);
        gen_st_cond(ctx, op, rt, rs, imm);
        check_insn(env, ctx, ISA_MIPS3 | ISA_MIPS32);
        /* Treat as NOP. */
        check_insn(env, ctx, ISA_MIPS4 | ISA_MIPS32);
        /* Treat as NOP. */

    /* Floating point (COP1). */
        gen_cop1_ldst(env, ctx, op, rt, rs, imm);
        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            op1 = MASK_CP1(ctx->opcode);
            check_insn(env, ctx, ISA_MIPS32R2);
            gen_cp1(ctx, op1, rt, rd);
#if defined(TARGET_MIPS64)
            check_insn(env, ctx, ISA_MIPS3);
            gen_cp1(ctx, op1, rt, rd);
            check_insn(env, ctx, ASE_MIPS3D);
            gen_compute_branch1(env, ctx, MASK_BC1(ctx->opcode),
                                (rt >> 2) & 0x7, imm << 2);
            gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
                       (imm >> 8) & 0x7);
            generate_exception (ctx, EXCP_RI);
            generate_exception_err(ctx, EXCP_CpU, 1);

        /* COP2: Not implemented. */
        generate_exception_err(ctx, EXCP_CpU, 2);
        if (env->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            op1 = MASK_CP3(ctx->opcode);
            gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
            /* Treat as NOP. */
            gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
            generate_exception (ctx, EXCP_RI);
            generate_exception_err(ctx, EXCP_CpU, 1);
#if defined(TARGET_MIPS64)
    /* MIPS64 opcodes */
    case OPC_LDL ... OPC_LDR:
        check_insn(env, ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(env, ctx, op, rt, rs, imm);
    case OPC_SDL ... OPC_SDR:
        check_insn(env, ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st(ctx, op, rt, rs, imm);
        check_insn(env, ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st_cond(ctx, op, rt, rs, imm);
        check_insn(env, ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith_imm(env, ctx, op, rt, rs, imm);
        check_insn(env, ctx, ASE_MIPS16 | ASE_MICROMIPS);
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, op, 4, rs, rt, offset);
        check_insn(env, ctx, ASE_MDMX);
        /* MDMX: Not implemented. */
    default:            /* Invalid */
        MIPS_INVAL("major opcode");
        generate_exception(ctx, EXCP_RI);
static inline void
gen_intermediate_code_internal (CPUMIPSState *env, TranslationBlock *tb,
                                int search_pc)
{
    target_ulong pc_start;
    uint16_t *gen_opc_end;

    qemu_log("search pc %d\n", search_pc);

    gen_opc_end = gen_opc_buf + OPC_MAX_SIZE;
    ctx.singlestep_enabled = env->singlestep_enabled;
    ctx.bstate = BS_NONE;
    /* Restore delay slot state from the tb context.  */
    ctx.hflags = (uint32_t)tb->flags; /* FIXME: maybe use 64 bits here? */
    restore_cpu_state(env, &ctx);
#ifdef CONFIG_USER_ONLY
    ctx.mem_idx = MIPS_HFLAG_UM;
    ctx.mem_idx = ctx.hflags & MIPS_HFLAG_KSU;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0)
        max_insns = CF_COUNT_MASK;
    LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb, ctx.mem_idx, ctx.hflags);
    gen_icount_start();
    while (ctx.bstate == BS_NONE) {
        if (unlikely(!QTAILQ_EMPTY(&env->breakpoints))) {
            QTAILQ_FOREACH(bp, &env->breakpoints, entry) {
                if (bp->pc == ctx.pc) {
                    save_cpu_state(&ctx, 1);
                    ctx.bstate = BS_BRANCH;
                    gen_helper_0i(raise_exception, EXCP_DEBUG);
                    /* Include the breakpoint location or the tb won't
                     * be flushed when it must be.  */
                    goto done_generating;

        j = gen_opc_ptr - gen_opc_buf;
        gen_opc_instr_start[lj++] = 0;
        gen_opc_pc[lj] = ctx.pc;
        gen_opc_hflags[lj] = ctx.hflags & MIPS_HFLAG_BMASK;
        gen_opc_instr_start[lj] = 1;
        gen_opc_icount[lj] = num_insns;
        if (num_insns + 1 == max_insns && (tb->cflags & CF_LAST_IO))

        if (!(ctx.hflags & MIPS_HFLAG_M16)) {
            ctx.opcode = ldl_code(ctx.pc);
            decode_opc(env, &ctx, &is_branch);
        } else if (env->insn_flags & ASE_MICROMIPS) {
            ctx.opcode = lduw_code(ctx.pc);
            insn_bytes = decode_micromips_opc(env, &ctx, &is_branch);
        } else if (env->insn_flags & ASE_MIPS16) {
            ctx.opcode = lduw_code(ctx.pc);
            insn_bytes = decode_mips16_opc(env, &ctx, &is_branch);
        } else {
            generate_exception(&ctx, EXCP_RI);
            ctx.bstate = BS_STOP;
        }
        handle_delay_slot(env, &ctx, insn_bytes);
        ctx.pc += insn_bytes;

        /* Execute a branch and its delay slot as a single instruction.
           This is what GDB expects and is consistent with what the
           hardware does (e.g. if a delay slot instruction faults, the
           reported PC is the PC of the branch).  */
        if (env->singlestep_enabled && (ctx.hflags & MIPS_HFLAG_BMASK) == 0)
        if ((ctx.pc & (TARGET_PAGE_SIZE - 1)) == 0)
        if (gen_opc_ptr >= gen_opc_end)
        if (num_insns >= max_insns)
    if (tb->cflags & CF_LAST_IO)
    if (env->singlestep_enabled && ctx.bstate != BS_BRANCH) {
        save_cpu_state(&ctx, ctx.bstate == BS_NONE);
        gen_helper_0i(raise_exception, EXCP_DEBUG);
    switch (ctx.bstate) {
        gen_goto_tb(&ctx, 0, ctx.pc);
        save_cpu_state(&ctx, 0);
        gen_goto_tb(&ctx, 0, ctx.pc);
        tcg_gen_exit_tb(0);
    gen_icount_end(tb, num_insns);
    *gen_opc_ptr = INDEX_op_end;
    j = gen_opc_ptr - gen_opc_buf;
    gen_opc_instr_start[lj++] = 0;
    tb->size = ctx.pc - pc_start;
    tb->icount = num_insns;
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(pc_start, ctx.pc - pc_start, 0);

void gen_intermediate_code (CPUMIPSState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 0);
}

void gen_intermediate_code_pc (CPUMIPSState *env, struct TranslationBlock *tb)
{
    gen_intermediate_code_internal(env, tb, 1);
}
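/* The two public entry points differ only in the search_pc argument:
   gen_intermediate_code() translates for execution, while
   gen_intermediate_code_pc() also records per-instruction PC and hflags
   values (gen_opc_pc[]/gen_opc_hflags[]) so the CPU state can later be
   recovered from a host PC. */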
static void fpu_dump_state(CPUMIPSState *env, FILE *f, fprintf_function fpu_fprintf,
                           int flags)
{
    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);

#define printfpr(fp)                                                    \
        fpu_fprintf(f, "w:%08x d:%016" PRIx64                           \
                    " fd:%13g fs:%13g psu: %13g\n",                     \
                    (fp)->w[FP_ENDIAN_IDX], (fp)->d,                    \
                    (double)(fp)->fd,                                   \
                    (double)(fp)->fs[FP_ENDIAN_IDX],                    \
                    (double)(fp)->fs[!FP_ENDIAN_IDX]);                  \
        tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX];                  \
        tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX];           \
        fpu_fprintf(f, "w:%08x d:%016" PRIx64                           \
                    " fd:%13g fs:%13g psu:%13g\n",                      \
                    tmp.w[FP_ENDIAN_IDX], tmp.d,                        \
                    (double)tmp.fs[FP_ENDIAN_IDX],                      \
                    (double)tmp.fs[!FP_ENDIAN_IDX]);                    \

    fpu_fprintf(f, "CP1 FCR0 0x%08x  FCR31 0x%08x  SR.FR %d  fp_status 0x%02x\n",
                env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                get_float_exception_flags(&env->active_fpu.fp_status));
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        fpu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);
    }

#if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
/* Debug help: The architecture requires 32bit code to maintain proper
   sign-extended values on 64bit machines. */

#define SIGN_EXT_P(val) ((((val) & ~0x7fffffff) == 0) || (((val) & ~0x7fffffff) == ~0x7fffffff))

cpu_mips_check_sign_extensions (CPUMIPSState *env, FILE *f,
                                fprintf_function cpu_fprintf,
                                int flags)
{
    if (!SIGN_EXT_P(env->active_tc.PC))
        cpu_fprintf(f, "BROKEN: pc=0x" TARGET_FMT_lx "\n", env->active_tc.PC);
    if (!SIGN_EXT_P(env->active_tc.HI[0]))
        cpu_fprintf(f, "BROKEN: HI=0x" TARGET_FMT_lx "\n", env->active_tc.HI[0]);
    if (!SIGN_EXT_P(env->active_tc.LO[0]))
        cpu_fprintf(f, "BROKEN: LO=0x" TARGET_FMT_lx "\n", env->active_tc.LO[0]);
    if (!SIGN_EXT_P(env->btarget))
        cpu_fprintf(f, "BROKEN: btarget=0x" TARGET_FMT_lx "\n", env->btarget);

    for (i = 0; i < 32; i++) {
        if (!SIGN_EXT_P(env->active_tc.gpr[i]))
            cpu_fprintf(f, "BROKEN: %s=0x" TARGET_FMT_lx "\n", regnames[i], env->active_tc.gpr[i]);
    }

    if (!SIGN_EXT_P(env->CP0_EPC))
        cpu_fprintf(f, "BROKEN: EPC=0x" TARGET_FMT_lx "\n", env->CP0_EPC);
    if (!SIGN_EXT_P(env->lladdr))
        cpu_fprintf(f, "BROKEN: LLAddr=0x" TARGET_FMT_lx "\n", env->lladdr);
void cpu_dump_state (CPUMIPSState *env, FILE *f, fprintf_function cpu_fprintf,
                     int flags)
{
    cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                " LO=0x" TARGET_FMT_lx " ds %04x "
                TARGET_FMT_lx " " TARGET_FMT_ld "\n",
                env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                env->hflags, env->btarget, env->bcond);
    for (i = 0; i < 32; i++) {
        cpu_fprintf(f, "GPR%02d:", i);
        cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
        cpu_fprintf(f, "\n");
    }

    cpu_fprintf(f, "CP0 Status  0x%08x Cause   0x%08x EPC    0x" TARGET_FMT_lx "\n",
                env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    cpu_fprintf(f, "    Config0 0x%08x Config1 0x%08x LLAddr 0x" TARGET_FMT_lx "\n",
                env->CP0_Config0, env->CP0_Config1, env->lladdr);
    if (env->hflags & MIPS_HFLAG_FPU)
        fpu_dump_state(env, f, cpu_fprintf, flags);
#if defined(TARGET_MIPS64) && defined(MIPS_DEBUG_SIGN_EXTENSIONS)
    cpu_mips_check_sign_extensions(env, f, cpu_fprintf, flags);
static void mips_tcg_init(void)
{
    /* Initialize various static tables. */
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    TCGV_UNUSED(cpu_gpr[0]);
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(TCG_AREG0,
                                        offsetof(CPUMIPSState, active_tc.gpr[i]),
    cpu_PC = tcg_global_mem_new(TCG_AREG0,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(TCG_AREG0,
                                       offsetof(CPUMIPSState, active_tc.HI[i]),
        cpu_LO[i] = tcg_global_mem_new(TCG_AREG0,
                                       offsetof(CPUMIPSState, active_tc.LO[i]),
        cpu_ACX[i] = tcg_global_mem_new(TCG_AREG0,
                                        offsetof(CPUMIPSState, active_tc.ACX[i]),
    cpu_dspctrl = tcg_global_mem_new(TCG_AREG0,
                                     offsetof(CPUMIPSState, active_tc.DSPControl),
    bcond = tcg_global_mem_new(TCG_AREG0,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(TCG_AREG0,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(TCG_AREG0,
                                    offsetof(CPUMIPSState, hflags), "hflags");

    fpu_fcr0 = tcg_global_mem_new_i32(TCG_AREG0,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),
    fpu_fcr31 = tcg_global_mem_new_i32(TCG_AREG0,
                                       offsetof(CPUMIPSState, active_fpu.fcr31),

    /* register helpers */
#define GEN_HELPER 2
#include "helper.h"

#include "translate_init.c"
CPUMIPSState *cpu_mips_init (const char *cpu_model)
{
    const mips_def_t *def;

    def = cpu_mips_find_by_name(cpu_model);
    env = g_malloc0(sizeof(CPUMIPSState));
    env->cpu_model = def;
    env->cpu_model_str = cpu_model;

    cpu_exec_init(env);
#ifndef CONFIG_USER_ONLY
    mmu_init(env, def);
    fpu_init(env, def);
    mvp_init(env, def);

    cpu_state_reset(env);
    qemu_init_vcpu(env);
void cpu_state_reset(CPUMIPSState *env)
{
    if (qemu_loglevel_mask(CPU_LOG_RESET)) {
        qemu_log("CPU Reset (CPU %d)\n", env->cpu_index);
        log_cpu_state(env, 0);
    }

    memset(env, 0, offsetof(CPUMIPSState, breakpoints));

    /* Reset registers to their default values */
    env->CP0_PRid = env->cpu_model->CP0_PRid;
    env->CP0_Config0 = env->cpu_model->CP0_Config0;
#ifdef TARGET_WORDS_BIGENDIAN
    env->CP0_Config0 |= (1 << CP0C0_BE);
    env->CP0_Config1 = env->cpu_model->CP0_Config1;
    env->CP0_Config2 = env->cpu_model->CP0_Config2;
    env->CP0_Config3 = env->cpu_model->CP0_Config3;
    env->CP0_Config6 = env->cpu_model->CP0_Config6;
    env->CP0_Config7 = env->cpu_model->CP0_Config7;
    env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
                                 << env->cpu_model->CP0_LLAddr_shift;
    env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
    env->SYNCI_Step = env->cpu_model->SYNCI_Step;
    env->CCRes = env->cpu_model->CCRes;
    env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
    env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
    env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
    env->current_tc = 0;
    env->SEGBITS = env->cpu_model->SEGBITS;
    env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        env->SEGMask |= 3ULL << 62;
    }
    env->PABITS = env->cpu_model->PABITS;
    env->PAMask = (target_ulong)((1ULL << env->cpu_model->PABITS) - 1);
    env->CP0_SRSConf0_rw_bitmask = env->cpu_model->CP0_SRSConf0_rw_bitmask;
    env->CP0_SRSConf0 = env->cpu_model->CP0_SRSConf0;
    env->CP0_SRSConf1_rw_bitmask = env->cpu_model->CP0_SRSConf1_rw_bitmask;
    env->CP0_SRSConf1 = env->cpu_model->CP0_SRSConf1;
    env->CP0_SRSConf2_rw_bitmask = env->cpu_model->CP0_SRSConf2_rw_bitmask;
    env->CP0_SRSConf2 = env->cpu_model->CP0_SRSConf2;
    env->CP0_SRSConf3_rw_bitmask = env->cpu_model->CP0_SRSConf3_rw_bitmask;
    env->CP0_SRSConf3 = env->cpu_model->CP0_SRSConf3;
    env->CP0_SRSConf4_rw_bitmask = env->cpu_model->CP0_SRSConf4_rw_bitmask;
    env->CP0_SRSConf4 = env->cpu_model->CP0_SRSConf4;
    env->insn_flags = env->cpu_model->insn_flags;

#if defined(CONFIG_USER_ONLY)
    env->hflags = MIPS_HFLAG_UM;
    /* Enable access to the SYNCI_Step register. */
    env->CP0_HWREna |= (1 << 1);
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        env->hflags |= MIPS_HFLAG_FPU;
    }
#ifdef TARGET_MIPS64
    if (env->active_fpu.fcr0 & (1 << FCR0_F64)) {
        env->hflags |= MIPS_HFLAG_F64;
    }
    if (env->hflags & MIPS_HFLAG_BMASK) {
        /* If the exception was raised from a delay slot,
           come back to the jump. */
        env->CP0_ErrorEPC = env->active_tc.PC - 4;
    } else {
        env->CP0_ErrorEPC = env->active_tc.PC;
    }
    env->active_tc.PC = (int32_t)0xBFC00000;
    env->CP0_Random = env->tlb->nb_tlb - 1;
    env->tlb->tlb_in_use = env->tlb->nb_tlb;
    env->CP0_Wired = 0;
    env->CP0_EBase = 0x80000000 | (env->cpu_index & 0x3FF);
    env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
    /* vectored interrupts not implemented, timer on int 7,
       no performance counters. */
    env->CP0_IntCtl = 0xe0000000;
    for (i = 0; i < 7; i++) {
        env->CP0_WatchLo[i] = 0;
        env->CP0_WatchHi[i] = 0x80000000;
    }
    env->CP0_WatchLo[7] = 0;
    env->CP0_WatchHi[7] = 0;
    /* Count register increments in debug mode, EJTAG version 1 */
    env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);
    env->hflags = MIPS_HFLAG_CP0;

    if (env->CP0_Config3 & (1 << CP0C3_MT)) {
        /* Only TC0 on VPE 0 starts as active. */
        for (i = 0; i < ARRAY_SIZE(env->tcs); i++) {
            env->tcs[i].CP0_TCBind = env->cpu_index << CP0TCBd_CurVPE;
            env->tcs[i].CP0_TCHalt = 1;
        }
        env->active_tc.CP0_TCHalt = 1;

        if (!env->cpu_index) {
            /* VPE0 starts up enabled. */
            env->mvp->CP0_MVPControl |= (1 << CP0MVPCo_EVP);
            env->CP0_VPEConf0 |= (1 << CP0VPEC0_MVP) | (1 << CP0VPEC0_VPA);

            /* TC0 starts up unhalted. */
            env->active_tc.CP0_TCHalt = 0;
            env->tcs[0].CP0_TCHalt = 0;
            /* With thread 0 active. */
            env->active_tc.CP0_TCStatus = (1 << CP0TCSt_A);
            env->tcs[0].CP0_TCStatus = (1 << CP0TCSt_A);
        }
    }
#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        env->hflags |= MIPS_HFLAG_64;
    }
    env->exception_index = EXCP_NONE;
}

void restore_state_to_opc(CPUMIPSState *env, TranslationBlock *tb, int pc_pos)
{
    env->active_tc.PC = gen_opc_pc[pc_pos];
    env->hflags &= ~MIPS_HFLAG_BMASK;
    env->hflags |= gen_opc_hflags[pc_pos];
}
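/* restore_state_to_opc() copies the PC and delay-slot hflags recorded at
   translation time back into the CPU state when execution stops in the
   middle of a translation block. */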