/*
 *  MIPS32 emulation for qemu: main translation routines.
 *
 *  Copyright (c) 2004-2005 Jocelyn Mayer
 *  Copyright (c) 2006 Marius Groeger (FPU operations)
 *  Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 *  Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 *  Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
#include "qemu/osdep.h"
#include "disas/disas.h"
#include "exec/exec-all.h"
#include "exec/cpu_ldst.h"

#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "sysemu/kvm.h"
#include "exec/semihost.h"

#include "trace-tcg.h"
#define MIPS_DEBUG_DISAS 0

/* MIPS major opcodes */
#define MASK_OP_MAJOR(op) (op & (0x3F << 26))
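/*
 * In the 32-bit MIPS encoding the major opcode occupies bits 31..26; the
 * MASK_* helpers below fold secondary fields (rs, rt, sa or the function
 * field) into that value so a single switch constant identifies an opcode.
 */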
enum {
    /* indirect opcode tables */
    OPC_SPECIAL = (0x00 << 26),
    OPC_REGIMM = (0x01 << 26),
    OPC_CP0 = (0x10 << 26),
    OPC_CP1 = (0x11 << 26),
    OPC_CP2 = (0x12 << 26),
    OPC_CP3 = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI = (0x08 << 26),
    OPC_ADDIU = (0x09 << 26),
    OPC_SLTI = (0x0A << 26),
    OPC_SLTIU = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI = (0x0C << 26),
    OPC_ORI = (0x0D << 26),
    OPC_XORI = (0x0E << 26),
    OPC_LUI = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI = (0x18 << 26),
    OPC_DADDIU = (0x19 << 26),
    /* Jump and branches */
    OPC_JAL = (0x03 << 26),
    OPC_BEQ = (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL = (0x14 << 26),
    OPC_BNE = (0x05 << 26),
    OPC_BNEL = (0x15 << 26),
    OPC_BLEZ = (0x06 << 26),
    OPC_BLEZL = (0x16 << 26),
    OPC_BGTZ = (0x07 << 26),
    OPC_BGTZL = (0x17 << 26),
    OPC_JALX = (0x1D << 26),
    OPC_DAUI = (0x1D << 26),
    OPC_LDL = (0x1A << 26),
    OPC_LDR = (0x1B << 26),
    OPC_LB = (0x20 << 26),
    OPC_LH = (0x21 << 26),
    OPC_LWL = (0x22 << 26),
    OPC_LW = (0x23 << 26),
    OPC_LWPC = OPC_LW | 0x5,
    OPC_LBU = (0x24 << 26),
    OPC_LHU = (0x25 << 26),
    OPC_LWR = (0x26 << 26),
    OPC_LWU = (0x27 << 26),
    OPC_SB = (0x28 << 26),
    OPC_SH = (0x29 << 26),
    OPC_SWL = (0x2A << 26),
    OPC_SW = (0x2B << 26),
    OPC_SDL = (0x2C << 26),
    OPC_SDR = (0x2D << 26),
    OPC_SWR = (0x2E << 26),
    OPC_LL = (0x30 << 26),
    OPC_LLD = (0x34 << 26),
    OPC_LD = (0x37 << 26),
    OPC_LDPC = OPC_LD | 0x5,
    OPC_SC = (0x38 << 26),
    OPC_SCD = (0x3C << 26),
    OPC_SD = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1 = (0x31 << 26),
    OPC_LWC2 = (0x32 << 26),
    OPC_LDC1 = (0x35 << 26),
    OPC_LDC2 = (0x36 << 26),
    OPC_SWC1 = (0x39 << 26),
    OPC_SWC2 = (0x3A << 26),
    OPC_SDC1 = (0x3D << 26),
    OPC_SDC2 = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC = (0x06 << 26),
    OPC_BGEZALC = (0x06 << 26),
    OPC_BGEUC = (0x06 << 26),
    OPC_BGTZALC = (0x07 << 26),
    OPC_BLTZALC = (0x07 << 26),
    OPC_BLTUC = (0x07 << 26),
    OPC_BOVC = (0x08 << 26),
    OPC_BEQZALC = (0x08 << 26),
    OPC_BEQC = (0x08 << 26),
    OPC_BLEZC = (0x16 << 26),
    OPC_BGEZC = (0x16 << 26),
    OPC_BGEC = (0x16 << 26),
    OPC_BGTZC = (0x17 << 26),
    OPC_BLTZC = (0x17 << 26),
    OPC_BLTC = (0x17 << 26),
    OPC_BNVC = (0x18 << 26),
    OPC_BNEZALC = (0x18 << 26),
    OPC_BNEC = (0x18 << 26),
    OPC_BC = (0x32 << 26),
    OPC_BEQZC = (0x36 << 26),
    OPC_JIC = (0x36 << 26),
    OPC_BALC = (0x3A << 26),
    OPC_BNEZC = (0x3E << 26),
    OPC_JIALC = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX = (0x1E << 26),
    /* MSA ASE, same as MDMX */
    OPC_MSA = OPC_MDMX,
    /* Cache and prefetch */
    OPC_CACHE = (0x2F << 26),
    OPC_PREF = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL = (0x3B << 26),
};
/* PC-relative address computation / loads */
#define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | (op & (3 << 19)))
#define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
enum {
    /* Instructions determined by bits 19 and 20 */
    OPC_ADDIUPC = OPC_PCREL | (0 << 19),
    R6_OPC_LWPC = OPC_PCREL | (1 << 19),
    OPC_LWUPC = OPC_PCREL | (2 << 19),

    /* Instructions determined by bits 16 ... 20 */
    OPC_AUIPC = OPC_PCREL | (0x1e << 16),
    OPC_ALUIPC = OPC_PCREL | (0x1f << 16),

    R6_OPC_LDPC = OPC_PCREL | (6 << 18),
};
/* MIPS special opcodes */
#define MASK_SPECIAL(op) MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_SLL = 0x00 | OPC_SPECIAL,
    /* NOP is SLL r0, r0, 0   */
    /* SSNOP is SLL r0, r0, 1 */
    /* EHB is SLL r0, r0, 3   */
    OPC_SRL = 0x02 | OPC_SPECIAL, /* also ROTR */
    OPC_ROTR = OPC_SRL | (1 << 21),
    OPC_SRA = 0x03 | OPC_SPECIAL,
    OPC_SLLV = 0x04 | OPC_SPECIAL,
    OPC_SRLV = 0x06 | OPC_SPECIAL, /* also ROTRV */
    OPC_ROTRV = OPC_SRLV | (1 << 6),
    OPC_SRAV = 0x07 | OPC_SPECIAL,
    OPC_DSLLV = 0x14 | OPC_SPECIAL,
    OPC_DSRLV = 0x16 | OPC_SPECIAL, /* also DROTRV */
    OPC_DROTRV = OPC_DSRLV | (1 << 6),
    OPC_DSRAV = 0x17 | OPC_SPECIAL,
    OPC_DSLL = 0x38 | OPC_SPECIAL,
    OPC_DSRL = 0x3A | OPC_SPECIAL, /* also DROTR */
    OPC_DROTR = OPC_DSRL | (1 << 21),
    OPC_DSRA = 0x3B | OPC_SPECIAL,
    OPC_DSLL32 = 0x3C | OPC_SPECIAL,
    OPC_DSRL32 = 0x3E | OPC_SPECIAL, /* also DROTR32 */
    OPC_DROTR32 = OPC_DSRL32 | (1 << 21),
    OPC_DSRA32 = 0x3F | OPC_SPECIAL,
    /* Multiplication / division */
    OPC_MULT = 0x18 | OPC_SPECIAL,
    OPC_MULTU = 0x19 | OPC_SPECIAL,
    OPC_DIV = 0x1A | OPC_SPECIAL,
    OPC_DIVU = 0x1B | OPC_SPECIAL,
    OPC_DMULT = 0x1C | OPC_SPECIAL,
    OPC_DMULTU = 0x1D | OPC_SPECIAL,
    OPC_DDIV = 0x1E | OPC_SPECIAL,
    OPC_DDIVU = 0x1F | OPC_SPECIAL,

    /* 2 registers arithmetic / logic */
    OPC_ADD = 0x20 | OPC_SPECIAL,
    OPC_ADDU = 0x21 | OPC_SPECIAL,
    OPC_SUB = 0x22 | OPC_SPECIAL,
    OPC_SUBU = 0x23 | OPC_SPECIAL,
    OPC_AND = 0x24 | OPC_SPECIAL,
    OPC_OR = 0x25 | OPC_SPECIAL,
    OPC_XOR = 0x26 | OPC_SPECIAL,
    OPC_NOR = 0x27 | OPC_SPECIAL,
    OPC_SLT = 0x2A | OPC_SPECIAL,
    OPC_SLTU = 0x2B | OPC_SPECIAL,
    OPC_DADD = 0x2C | OPC_SPECIAL,
    OPC_DADDU = 0x2D | OPC_SPECIAL,
    OPC_DSUB = 0x2E | OPC_SPECIAL,
    OPC_DSUBU = 0x2F | OPC_SPECIAL,

    OPC_JR = 0x08 | OPC_SPECIAL, /* Also JR.HB */
    OPC_JALR = 0x09 | OPC_SPECIAL, /* Also JALR.HB */

    OPC_TGE = 0x30 | OPC_SPECIAL,
    OPC_TGEU = 0x31 | OPC_SPECIAL,
    OPC_TLT = 0x32 | OPC_SPECIAL,
    OPC_TLTU = 0x33 | OPC_SPECIAL,
    OPC_TEQ = 0x34 | OPC_SPECIAL,
    OPC_TNE = 0x36 | OPC_SPECIAL,
    /* HI / LO registers load & stores */
    OPC_MFHI = 0x10 | OPC_SPECIAL,
    OPC_MTHI = 0x11 | OPC_SPECIAL,
    OPC_MFLO = 0x12 | OPC_SPECIAL,
    OPC_MTLO = 0x13 | OPC_SPECIAL,
    /* Conditional moves */
    OPC_MOVZ = 0x0A | OPC_SPECIAL,
    OPC_MOVN = 0x0B | OPC_SPECIAL,

    OPC_SELEQZ = 0x35 | OPC_SPECIAL,
    OPC_SELNEZ = 0x37 | OPC_SPECIAL,

    OPC_MOVCI = 0x01 | OPC_SPECIAL,

    OPC_PMON = 0x05 | OPC_SPECIAL, /* unofficial */
    OPC_SYSCALL = 0x0C | OPC_SPECIAL,
    OPC_BREAK = 0x0D | OPC_SPECIAL,
    OPC_SPIM = 0x0E | OPC_SPECIAL, /* unofficial */
    OPC_SYNC = 0x0F | OPC_SPECIAL,

    OPC_SPECIAL28_RESERVED = 0x28 | OPC_SPECIAL,
    OPC_SPECIAL29_RESERVED = 0x29 | OPC_SPECIAL,
    OPC_SPECIAL39_RESERVED = 0x39 | OPC_SPECIAL,
    OPC_SPECIAL3D_RESERVED = 0x3D | OPC_SPECIAL,
};
/* R6 Multiply and Divide instructions have the same opcode
   and function field as legacy OPC_MULT[U]/OPC_DIV[U] */
#define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))

enum {
    R6_OPC_MUL = OPC_MULT | (2 << 6),
    R6_OPC_MUH = OPC_MULT | (3 << 6),
    R6_OPC_MULU = OPC_MULTU | (2 << 6),
    R6_OPC_MUHU = OPC_MULTU | (3 << 6),
    R6_OPC_DIV = OPC_DIV | (2 << 6),
    R6_OPC_MOD = OPC_DIV | (3 << 6),
    R6_OPC_DIVU = OPC_DIVU | (2 << 6),
    R6_OPC_MODU = OPC_DIVU | (3 << 6),

    R6_OPC_DMUL = OPC_DMULT | (2 << 6),
    R6_OPC_DMUH = OPC_DMULT | (3 << 6),
    R6_OPC_DMULU = OPC_DMULTU | (2 << 6),
    R6_OPC_DMUHU = OPC_DMULTU | (3 << 6),
    R6_OPC_DDIV = OPC_DDIV | (2 << 6),
    R6_OPC_DMOD = OPC_DDIV | (3 << 6),
    R6_OPC_DDIVU = OPC_DDIVU | (2 << 6),
    R6_OPC_DMODU = OPC_DDIVU | (3 << 6),

    R6_OPC_CLZ = 0x10 | OPC_SPECIAL,
    R6_OPC_CLO = 0x11 | OPC_SPECIAL,
    R6_OPC_DCLZ = 0x12 | OPC_SPECIAL,
    R6_OPC_DCLO = 0x13 | OPC_SPECIAL,
    R6_OPC_SDBBP = 0x0e | OPC_SPECIAL,

    OPC_LSA = 0x05 | OPC_SPECIAL,
    OPC_DLSA = 0x15 | OPC_SPECIAL,
};
/* Multiplication variants of the vr54xx. */
#define MASK_MUL_VR54XX(op) MASK_SPECIAL(op) | (op & (0x1F << 6))

enum {
    OPC_VR54XX_MULS = (0x03 << 6) | OPC_MULT,
    OPC_VR54XX_MULSU = (0x03 << 6) | OPC_MULTU,
    OPC_VR54XX_MACC = (0x05 << 6) | OPC_MULT,
    OPC_VR54XX_MACCU = (0x05 << 6) | OPC_MULTU,
    OPC_VR54XX_MSAC = (0x07 << 6) | OPC_MULT,
    OPC_VR54XX_MSACU = (0x07 << 6) | OPC_MULTU,
    OPC_VR54XX_MULHI = (0x09 << 6) | OPC_MULT,
    OPC_VR54XX_MULHIU = (0x09 << 6) | OPC_MULTU,
    OPC_VR54XX_MULSHI = (0x0B << 6) | OPC_MULT,
    OPC_VR54XX_MULSHIU = (0x0B << 6) | OPC_MULTU,
    OPC_VR54XX_MACCHI = (0x0D << 6) | OPC_MULT,
    OPC_VR54XX_MACCHIU = (0x0D << 6) | OPC_MULTU,
    OPC_VR54XX_MSACHI = (0x0F << 6) | OPC_MULT,
    OPC_VR54XX_MSACHIU = (0x0F << 6) | OPC_MULTU,
};
/* REGIMM (rt field) opcodes */
#define MASK_REGIMM(op) MASK_OP_MAJOR(op) | (op & (0x1F << 16))

enum {
    OPC_BLTZ = (0x00 << 16) | OPC_REGIMM,
    OPC_BLTZL = (0x02 << 16) | OPC_REGIMM,
    OPC_BGEZ = (0x01 << 16) | OPC_REGIMM,
    OPC_BGEZL = (0x03 << 16) | OPC_REGIMM,
    OPC_BLTZAL = (0x10 << 16) | OPC_REGIMM,
    OPC_BLTZALL = (0x12 << 16) | OPC_REGIMM,
    OPC_BGEZAL = (0x11 << 16) | OPC_REGIMM,
    OPC_BGEZALL = (0x13 << 16) | OPC_REGIMM,
    OPC_TGEI = (0x08 << 16) | OPC_REGIMM,
    OPC_TGEIU = (0x09 << 16) | OPC_REGIMM,
    OPC_TLTI = (0x0A << 16) | OPC_REGIMM,
    OPC_TLTIU = (0x0B << 16) | OPC_REGIMM,
    OPC_TEQI = (0x0C << 16) | OPC_REGIMM,
    OPC_TNEI = (0x0E << 16) | OPC_REGIMM,
    OPC_SIGRIE = (0x17 << 16) | OPC_REGIMM,
    OPC_SYNCI = (0x1F << 16) | OPC_REGIMM,

    OPC_DAHI = (0x06 << 16) | OPC_REGIMM,
    OPC_DATI = (0x1e << 16) | OPC_REGIMM,
};
/* Special2 opcodes */
#define MASK_SPECIAL2(op) MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    /* Multiply & xxx operations */
    OPC_MADD = 0x00 | OPC_SPECIAL2,
    OPC_MADDU = 0x01 | OPC_SPECIAL2,
    OPC_MUL = 0x02 | OPC_SPECIAL2,
    OPC_MSUB = 0x04 | OPC_SPECIAL2,
    OPC_MSUBU = 0x05 | OPC_SPECIAL2,

    OPC_MULT_G_2F = 0x10 | OPC_SPECIAL2,
    OPC_DMULT_G_2F = 0x11 | OPC_SPECIAL2,
    OPC_MULTU_G_2F = 0x12 | OPC_SPECIAL2,
    OPC_DMULTU_G_2F = 0x13 | OPC_SPECIAL2,
    OPC_DIV_G_2F = 0x14 | OPC_SPECIAL2,
    OPC_DDIV_G_2F = 0x15 | OPC_SPECIAL2,
    OPC_DIVU_G_2F = 0x16 | OPC_SPECIAL2,
    OPC_DDIVU_G_2F = 0x17 | OPC_SPECIAL2,
    OPC_MOD_G_2F = 0x1c | OPC_SPECIAL2,
    OPC_DMOD_G_2F = 0x1d | OPC_SPECIAL2,
    OPC_MODU_G_2F = 0x1e | OPC_SPECIAL2,
    OPC_DMODU_G_2F = 0x1f | OPC_SPECIAL2,

    OPC_CLZ = 0x20 | OPC_SPECIAL2,
    OPC_CLO = 0x21 | OPC_SPECIAL2,
    OPC_DCLZ = 0x24 | OPC_SPECIAL2,
    OPC_DCLO = 0x25 | OPC_SPECIAL2,

    OPC_SDBBP = 0x3F | OPC_SPECIAL2,
};
/* Special3 opcodes */
#define MASK_SPECIAL3(op) MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_EXT = 0x00 | OPC_SPECIAL3,
    OPC_DEXTM = 0x01 | OPC_SPECIAL3,
    OPC_DEXTU = 0x02 | OPC_SPECIAL3,
    OPC_DEXT = 0x03 | OPC_SPECIAL3,
    OPC_INS = 0x04 | OPC_SPECIAL3,
    OPC_DINSM = 0x05 | OPC_SPECIAL3,
    OPC_DINSU = 0x06 | OPC_SPECIAL3,
    OPC_DINS = 0x07 | OPC_SPECIAL3,
    OPC_FORK = 0x08 | OPC_SPECIAL3,
    OPC_YIELD = 0x09 | OPC_SPECIAL3,
    OPC_BSHFL = 0x20 | OPC_SPECIAL3,
    OPC_DBSHFL = 0x24 | OPC_SPECIAL3,
    OPC_RDHWR = 0x3B | OPC_SPECIAL3,

    OPC_MULT_G_2E = 0x18 | OPC_SPECIAL3,
    OPC_MULTU_G_2E = 0x19 | OPC_SPECIAL3,
    OPC_DIV_G_2E = 0x1A | OPC_SPECIAL3,
    OPC_DIVU_G_2E = 0x1B | OPC_SPECIAL3,
    OPC_DMULT_G_2E = 0x1C | OPC_SPECIAL3,
    OPC_DMULTU_G_2E = 0x1D | OPC_SPECIAL3,
    OPC_DDIV_G_2E = 0x1E | OPC_SPECIAL3,
    OPC_DDIVU_G_2E = 0x1F | OPC_SPECIAL3,
    OPC_MOD_G_2E = 0x22 | OPC_SPECIAL3,
    OPC_MODU_G_2E = 0x23 | OPC_SPECIAL3,
    OPC_DMOD_G_2E = 0x26 | OPC_SPECIAL3,
    OPC_DMODU_G_2E = 0x27 | OPC_SPECIAL3,

    OPC_LX_DSP = 0x0A | OPC_SPECIAL3,
    /* MIPS DSP Arithmetic */
    OPC_ADDU_QB_DSP = 0x10 | OPC_SPECIAL3,
    OPC_ADDU_OB_DSP = 0x14 | OPC_SPECIAL3,
    OPC_ABSQ_S_PH_DSP = 0x12 | OPC_SPECIAL3,
    OPC_ABSQ_S_QH_DSP = 0x16 | OPC_SPECIAL3,
    /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
    /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_CMPU_EQ_QB_DSP = 0x11 | OPC_SPECIAL3,
    OPC_CMPU_EQ_OB_DSP = 0x15 | OPC_SPECIAL3,
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB_DSP = 0x13 | OPC_SPECIAL3,
    OPC_SHLL_OB_DSP = 0x17 | OPC_SPECIAL3,
    /* MIPS DSP Multiply Sub-class insns */
    /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
    /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
    OPC_DPA_W_PH_DSP = 0x30 | OPC_SPECIAL3,
    OPC_DPAQ_W_QH_DSP = 0x34 | OPC_SPECIAL3,
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV_DSP = 0x0C | OPC_SPECIAL3,
    OPC_DINSV_DSP = 0x0D | OPC_SPECIAL3,
    /* MIPS DSP Append Sub-class */
    OPC_APPEND_DSP = 0x31 | OPC_SPECIAL3,
    OPC_DAPPEND_DSP = 0x35 | OPC_SPECIAL3,
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W_DSP = 0x38 | OPC_SPECIAL3,
    OPC_DEXTR_W_DSP = 0x3C | OPC_SPECIAL3,

    R6_OPC_PREF = 0x35 | OPC_SPECIAL3,
    R6_OPC_CACHE = 0x25 | OPC_SPECIAL3,
    R6_OPC_LL = 0x36 | OPC_SPECIAL3,
    R6_OPC_SC = 0x26 | OPC_SPECIAL3,
    R6_OPC_LLD = 0x37 | OPC_SPECIAL3,
    R6_OPC_SCD = 0x27 | OPC_SPECIAL3,
};
#define MASK_BSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_WSBH = (0x02 << 6) | OPC_BSHFL,
    OPC_SEB = (0x10 << 6) | OPC_BSHFL,
    OPC_SEH = (0x18 << 6) | OPC_BSHFL,
    OPC_ALIGN = (0x08 << 6) | OPC_BSHFL, /* 010.bp */
    OPC_ALIGN_END = (0x0B << 6) | OPC_BSHFL, /* 010.00 to 010.11 */
    OPC_BITSWAP = (0x00 << 6) | OPC_BSHFL  /* 00000 */
};

#define MASK_DBSHFL(op) MASK_SPECIAL3(op) | (op & (0x1F << 6))

enum {
    OPC_DSBH = (0x02 << 6) | OPC_DBSHFL,
    OPC_DSHD = (0x05 << 6) | OPC_DBSHFL,
    OPC_DALIGN = (0x08 << 6) | OPC_DBSHFL, /* 01.bp */
    OPC_DALIGN_END = (0x0F << 6) | OPC_DBSHFL, /* 01.000 to 01.111 */
    OPC_DBITSWAP = (0x00 << 6) | OPC_DBSHFL, /* 00000 */
};
/* MIPS DSP REGIMM opcodes */
enum {
    OPC_BPOSGE32 = (0x1C << 16) | OPC_REGIMM,
    OPC_BPOSGE64 = (0x1D << 16) | OPC_REGIMM,
};
#define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    OPC_LBUX = (0x06 << 6) | OPC_LX_DSP,
    OPC_LHX = (0x04 << 6) | OPC_LX_DSP,
    OPC_LWX = (0x00 << 6) | OPC_LX_DSP,
    OPC_LDX = (0x08 << 6) | OPC_LX_DSP,
};
#define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDQ_PH = (0x0A << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_PH = (0x0E << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDQ_S_W = (0x16 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_QB = (0x00 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_QB = (0x04 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_PH = (0x08 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDU_S_PH = (0x0C << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_PH = (0x0B << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_PH = (0x0F << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBQ_S_W = (0x17 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_QB = (0x01 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_QB = (0x05 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_PH = (0x09 << 6) | OPC_ADDU_QB_DSP,
    OPC_SUBU_S_PH = (0x0D << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDSC = (0x10 << 6) | OPC_ADDU_QB_DSP,
    OPC_ADDWC = (0x11 << 6) | OPC_ADDU_QB_DSP,
    OPC_MODSUB = (0x12 << 6) | OPC_ADDU_QB_DSP,
    OPC_RADDU_W_QB = (0x14 << 6) | OPC_ADDU_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEU_S_PH_QBL = (0x06 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEU_S_PH_QBR = (0x07 << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_RS_PH = (0x1F << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHL = (0x1C << 6) | OPC_ADDU_QB_DSP,
    OPC_MULEQ_S_W_PHR = (0x1D << 6) | OPC_ADDU_QB_DSP,
    OPC_MULQ_S_PH = (0x1E << 6) | OPC_ADDU_QB_DSP,
};
#define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
#define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ADDUH_QB = (0x00 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDUH_R_QB = (0x02 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_PH = (0x08 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_PH = (0x0A << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_W = (0x10 << 6) | OPC_ADDUH_QB_DSP,
    OPC_ADDQH_R_W = (0x12 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_QB = (0x01 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBUH_R_QB = (0x03 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_PH = (0x09 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_PH = (0x0B << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_W = (0x11 << 6) | OPC_ADDUH_QB_DSP,
    OPC_SUBQH_R_W = (0x13 << 6) | OPC_ADDUH_QB_DSP,
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MUL_PH = (0x0C << 6) | OPC_ADDUH_QB_DSP,
    OPC_MUL_S_PH = (0x0E << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_S_W = (0x16 << 6) | OPC_ADDUH_QB_DSP,
    OPC_MULQ_RS_W = (0x17 << 6) | OPC_ADDUH_QB_DSP,
};
#define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_ABSQ_S_QB = (0x01 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_PH = (0x09 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_ABSQ_S_W = (0x11 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHL = (0x0C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQ_W_PHR = (0x0D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBL = (0x04 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBR = (0x05 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBLA = (0x06 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEQU_PH_QBRA = (0x07 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBL = (0x1C << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBR = (0x1D << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBLA = (0x1E << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_PRECEU_PH_QBRA = (0x1F << 6) | OPC_ABSQ_S_PH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_BITREV = (0x1B << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_QB = (0x02 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_QB = (0x03 << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPL_PH = (0x0A << 6) | OPC_ABSQ_S_PH_DSP,
    OPC_REPLV_PH = (0x0B << 6) | OPC_ABSQ_S_PH_DSP,
};
#define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_QB_PH = (0x0D << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_QB_PH = (0x0C << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_PH_W = (0x1E << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECR_SRA_R_PH_W = (0x1F << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_PH_W = (0x14 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQ_RS_PH_W = (0x15 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PRECRQU_S_QB_PH = (0x0F << 6) | OPC_CMPU_EQ_QB_DSP,
    /* DSP Compare-Pick Sub-class */
    OPC_CMPU_EQ_QB = (0x00 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LT_QB = (0x01 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPU_LE_QB = (0x02 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_EQ_QB = (0x04 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LT_QB = (0x05 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGU_LE_QB = (0x06 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_EQ_QB = (0x18 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LT_QB = (0x19 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMPGDU_LE_QB = (0x1A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_EQ_PH = (0x08 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LT_PH = (0x09 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_CMP_LE_PH = (0x0A << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_QB = (0x03 << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PICK_PH = (0x0B << 6) | OPC_CMPU_EQ_QB_DSP,
    OPC_PACKRL_PH = (0x0E << 6) | OPC_CMPU_EQ_QB_DSP,
};
#define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_QB = (0x00 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_QB = (0x02 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_PH = (0x08 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_PH = (0x0A << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_PH = (0x0C << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_PH = (0x0E << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLL_S_W = (0x14 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHLLV_S_W = (0x16 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_QB = (0x01 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_QB = (0x03 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRL_PH = (0x19 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRLV_PH = (0x1B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_QB = (0x04 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_QB = (0x05 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_QB = (0x06 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_QB = (0x07 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_PH = (0x09 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_PH = (0x0B << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_PH = (0x0D << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_PH = (0x0F << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRA_R_W = (0x15 << 6) | OPC_SHLL_QB_DSP,
    OPC_SHRAV_R_W = (0x17 << 6) | OPC_SHLL_QB_DSP,
};
#define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DPAU_H_QBL = (0x03 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAU_H_QBR = (0x07 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBL = (0x0B << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSU_H_QBR = (0x0F << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPA_W_PH = (0x00 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAX_W_PH = (0x08 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_S_W_PH = (0x04 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_S_W_PH = (0x18 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQX_SA_W_PH = (0x1A << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPS_W_PH = (0x01 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSX_W_PH = (0x09 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_S_W_PH = (0x05 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_S_W_PH = (0x19 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQX_SA_W_PH = (0x1B << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSAQ_S_W_PH = (0x06 << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPAQ_SA_L_W = (0x0C << 6) | OPC_DPA_W_PH_DSP,
    OPC_DPSQ_SA_L_W = (0x0D << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHL = (0x14 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_S_W_PHR = (0x16 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHL = (0x10 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MAQ_SA_W_PHR = (0x12 << 6) | OPC_DPA_W_PH_DSP,
    OPC_MULSA_W_PH = (0x02 << 6) | OPC_DPA_W_PH_DSP,
};
#define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_INSV = (0x00 << 6) | OPC_INSV_DSP,
};
#define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Append Sub-class */
    OPC_APPEND = (0x00 << 6) | OPC_APPEND_DSP,
    OPC_PREPEND = (0x01 << 6) | OPC_APPEND_DSP,
    OPC_BALIGN = (0x10 << 6) | OPC_APPEND_DSP,
};
#define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_EXTR_W = (0x00 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_R_W = (0x04 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_RS_W = (0x06 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTR_S_H = (0x0E << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_S_H = (0x0F << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_W = (0x01 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_R_W = (0x05 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTRV_RS_W = (0x07 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTP = (0x02 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPV = (0x03 << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDP = (0x0A << 6) | OPC_EXTR_W_DSP,
    OPC_EXTPDPV = (0x0B << 6) | OPC_EXTR_W_DSP,
    OPC_SHILO = (0x1A << 6) | OPC_EXTR_W_DSP,
    OPC_SHILOV = (0x1B << 6) | OPC_EXTR_W_DSP,
    OPC_MTHLIP = (0x1F << 6) | OPC_EXTR_W_DSP,
    OPC_WRDSP = (0x13 << 6) | OPC_EXTR_W_DSP,
    OPC_RDDSP = (0x12 << 6) | OPC_EXTR_W_DSP,
};
#define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECEQ_L_PWL = (0x14 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_L_PWR = (0x15 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHL = (0x0C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHR = (0x0D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHLA = (0x0E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQ_PW_QHRA = (0x0F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBL = (0x04 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBR = (0x05 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBLA = (0x06 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEQU_QH_OBRA = (0x07 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBL = (0x1C << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBR = (0x1D << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBLA = (0x1E << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_PRECEU_QH_OBRA = (0x1F << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_OB = (0x01 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_PW = (0x11 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_ABSQ_S_QH = (0x09 << 6) | OPC_ABSQ_S_QH_DSP,
    /* DSP Bit/Manipulation Sub-class */
    OPC_REPL_OB = (0x02 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_PW = (0x12 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPL_QH = (0x0A << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_OB = (0x03 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_PW = (0x13 << 6) | OPC_ABSQ_S_QH_DSP,
    OPC_REPLV_QH = (0x0B << 6) | OPC_ABSQ_S_QH_DSP,
};
#define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_MULEQ_S_PW_QHL = (0x1C << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEQ_S_PW_QHR = (0x1D << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBL = (0x06 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULEU_S_QH_OBR = (0x07 << 6) | OPC_ADDU_OB_DSP,
    OPC_MULQ_RS_QH = (0x1F << 6) | OPC_ADDU_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_RADDU_L_OB = (0x14 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_PW = (0x13 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_PW = (0x17 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_QH = (0x0B << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBQ_S_QH = (0x0F << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_OB = (0x01 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_OB = (0x05 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_QH = (0x09 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBU_S_QH = (0x0D << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_OB = (0x19 << 6) | OPC_ADDU_OB_DSP,
    OPC_SUBUH_R_OB = (0x1B << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_PW = (0x12 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_PW = (0x16 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_QH = (0x0A << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDQ_S_QH = (0x0E << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_OB = (0x00 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_OB = (0x04 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_QH = (0x08 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDU_S_QH = (0x0C << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_OB = (0x18 << 6) | OPC_ADDU_OB_DSP,
    OPC_ADDUH_R_OB = (0x1A << 6) | OPC_ADDU_OB_DSP,
};
#define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Compare-Pick Sub-class */
    OPC_CMP_EQ_PW = (0x10 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_PW = (0x11 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_PW = (0x12 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_EQ_QH = (0x08 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LT_QH = (0x09 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMP_LE_QH = (0x0A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_EQ_OB = (0x18 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LT_OB = (0x19 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGDU_LE_OB = (0x1A << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_EQ_OB = (0x04 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LT_OB = (0x05 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPGU_LE_OB = (0x06 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_EQ_OB = (0x00 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LT_OB = (0x01 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_CMPU_LE_OB = (0x02 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PACKRL_PW = (0x0E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_OB = (0x03 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_PW = (0x13 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PICK_QH = (0x0B << 6) | OPC_CMPU_EQ_OB_DSP,
    /* MIPS DSP Arithmetic Sub-class */
    OPC_PRECR_OB_QH = (0x0D << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_QH_PW = (0x1E << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECR_SRA_R_QH_PW = (0x1F << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_OB_QH = (0x0C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_PW_L = (0x1C << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_QH_PW = (0x14 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQ_RS_QH_PW = (0x15 << 6) | OPC_CMPU_EQ_OB_DSP,
    OPC_PRECRQU_S_OB_QH = (0x0F << 6) | OPC_CMPU_EQ_OB_DSP,
};
#define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Append Sub-class */
    OPC_DAPPEND = (0x00 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDD = (0x03 << 6) | OPC_DAPPEND_DSP,
    OPC_PREPENDW = (0x01 << 6) | OPC_DAPPEND_DSP,
    OPC_DBALIGN = (0x10 << 6) | OPC_DAPPEND_DSP,
};
#define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Accumulator and DSPControl Access Sub-class */
    OPC_DMTHLIP = (0x1F << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILO = (0x1A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTP = (0x02 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDP = (0x0A << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPDPV = (0x0B << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTPV = (0x03 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_L = (0x10 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_L = (0x14 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_L = (0x16 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_W = (0x00 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_R_W = (0x04 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_RS_W = (0x06 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTR_S_H = (0x0E << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_L = (0x11 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_L = (0x15 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_L = (0x17 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_S_H = (0x0F << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_W = (0x01 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_R_W = (0x05 << 6) | OPC_DEXTR_W_DSP,
    OPC_DEXTRV_RS_W = (0x07 << 6) | OPC_DEXTR_W_DSP,
    OPC_DSHILOV = (0x1B << 6) | OPC_DEXTR_W_DSP,
};
#define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* DSP Bit/Manipulation Sub-class */
    OPC_DINSV = (0x00 << 6) | OPC_DINSV_DSP,
};
#define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP Multiply Sub-class insns */
    OPC_DMADD = (0x19 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMADDU = (0x1D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUB = (0x1B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DMSUBU = (0x1F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPA_W_QH = (0x00 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_S_W_QH = (0x04 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAQ_SA_L_PW = (0x0C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBL = (0x03 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPAU_H_OBR = (0x07 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPS_W_QH = (0x01 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_S_W_QH = (0x05 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSQ_SA_L_PW = (0x0D << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBL = (0x0B << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_DPSU_H_OBR = (0x0F << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWL = (0x1C << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_L_PWR = (0x1E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLL = (0x14 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLL = (0x10 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHLR = (0x15 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHLR = (0x11 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRL = (0x16 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRL = (0x12 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_S_W_QHRR = (0x17 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MAQ_SA_W_QHRR = (0x13 << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_L_PW = (0x0E << 6) | OPC_DPAQ_W_QH_DSP,
    OPC_MULSAQ_S_W_QH = (0x06 << 6) | OPC_DPAQ_W_QH_DSP,
};
#define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
enum {
    /* MIPS DSP GPR-Based Shift Sub-class */
    OPC_SHLL_PW = (0x10 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_PW = (0x14 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_OB = (0x02 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_PW = (0x12 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_PW = (0x16 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_QH = (0x0A << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLLV_S_QH = (0x0E << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_PW = (0x11 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_PW = (0x15 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_OB = (0x06 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_OB = (0x07 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_PW = (0x13 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_PW = (0x17 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_QH = (0x0B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRAV_R_QH = (0x0F << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_OB = (0x03 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRLV_QH = (0x1B << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_OB = (0x00 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_QH = (0x08 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHLL_S_QH = (0x0C << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_OB = (0x04 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_OB = (0x05 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_QH = (0x09 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRA_R_QH = (0x0D << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_OB = (0x01 << 6) | OPC_SHLL_OB_DSP,
    OPC_SHRL_QH = (0x19 << 6) | OPC_SHLL_OB_DSP,
};
/* Coprocessor 0 (rs field) */
#define MASK_CP0(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC0 = (0x00 << 21) | OPC_CP0,
    OPC_DMFC0 = (0x01 << 21) | OPC_CP0,
    OPC_MFHC0 = (0x02 << 21) | OPC_CP0,
    OPC_MTC0 = (0x04 << 21) | OPC_CP0,
    OPC_DMTC0 = (0x05 << 21) | OPC_CP0,
    OPC_MTHC0 = (0x06 << 21) | OPC_CP0,
    OPC_MFTR = (0x08 << 21) | OPC_CP0,
    OPC_RDPGPR = (0x0A << 21) | OPC_CP0,
    OPC_MFMC0 = (0x0B << 21) | OPC_CP0,
    OPC_MTTR = (0x0C << 21) | OPC_CP0,
    OPC_WRPGPR = (0x0E << 21) | OPC_CP0,
    OPC_C0 = (0x10 << 21) | OPC_CP0,
    OPC_C0_FIRST = (0x10 << 21) | OPC_CP0,
    OPC_C0_LAST = (0x1F << 21) | OPC_CP0,
};
#define MASK_MFMC0(op) MASK_CP0(op) | (op & 0xFFFF)

enum {
    OPC_DMT = 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_EMT = 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0,
    OPC_DVPE = 0x01 | (0 << 5) | OPC_MFMC0,
    OPC_EVPE = 0x01 | (1 << 5) | OPC_MFMC0,
    OPC_DI = (0 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_EI = (1 << 5) | (0x0C << 11) | OPC_MFMC0,
    OPC_DVP = 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0,
    OPC_EVP = 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0,
};
/* Coprocessor 0 (with rs == C0) */
#define MASK_C0(op) MASK_CP0(op) | (op & 0x3F)

enum {
    OPC_TLBR = 0x01 | OPC_C0,
    OPC_TLBWI = 0x02 | OPC_C0,
    OPC_TLBINV = 0x03 | OPC_C0,
    OPC_TLBINVF = 0x04 | OPC_C0,
    OPC_TLBWR = 0x06 | OPC_C0,
    OPC_TLBP = 0x08 | OPC_C0,
    OPC_RFE = 0x10 | OPC_C0,
    OPC_ERET = 0x18 | OPC_C0,
    OPC_DERET = 0x1F | OPC_C0,
    OPC_WAIT = 0x20 | OPC_C0,
};
/* Coprocessor 1 (rs field) */
#define MASK_CP1(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))

/* Values for the fmt field in FP instructions */
enum {
    /* 0 - 15 are reserved */
    FMT_S = 16,  /* single fp */
    FMT_D = 17,  /* double fp */
    FMT_E = 18,  /* extended fp */
    FMT_Q = 19,  /* quad fp */
    FMT_W = 20,  /* 32-bit fixed */
    FMT_L = 21,  /* 64-bit fixed */
    FMT_PS = 22, /* paired single fp */
    /* 23 - 31 are reserved */
};

enum {
    OPC_MFC1 = (0x00 << 21) | OPC_CP1,
    OPC_DMFC1 = (0x01 << 21) | OPC_CP1,
    OPC_CFC1 = (0x02 << 21) | OPC_CP1,
    OPC_MFHC1 = (0x03 << 21) | OPC_CP1,
    OPC_MTC1 = (0x04 << 21) | OPC_CP1,
    OPC_DMTC1 = (0x05 << 21) | OPC_CP1,
    OPC_CTC1 = (0x06 << 21) | OPC_CP1,
    OPC_MTHC1 = (0x07 << 21) | OPC_CP1,
    OPC_BC1 = (0x08 << 21) | OPC_CP1, /* bc */
    OPC_BC1ANY2 = (0x09 << 21) | OPC_CP1,
    OPC_BC1ANY4 = (0x0A << 21) | OPC_CP1,
    OPC_BZ_V = (0x0B << 21) | OPC_CP1,
    OPC_BNZ_V = (0x0F << 21) | OPC_CP1,
    OPC_S_FMT = (FMT_S << 21) | OPC_CP1,
    OPC_D_FMT = (FMT_D << 21) | OPC_CP1,
    OPC_E_FMT = (FMT_E << 21) | OPC_CP1,
    OPC_Q_FMT = (FMT_Q << 21) | OPC_CP1,
    OPC_W_FMT = (FMT_W << 21) | OPC_CP1,
    OPC_L_FMT = (FMT_L << 21) | OPC_CP1,
    OPC_PS_FMT = (FMT_PS << 21) | OPC_CP1,
    OPC_BC1EQZ = (0x09 << 21) | OPC_CP1,
    OPC_BC1NEZ = (0x0D << 21) | OPC_CP1,
    OPC_BZ_B = (0x18 << 21) | OPC_CP1,
    OPC_BZ_H = (0x19 << 21) | OPC_CP1,
    OPC_BZ_W = (0x1A << 21) | OPC_CP1,
    OPC_BZ_D = (0x1B << 21) | OPC_CP1,
    OPC_BNZ_B = (0x1C << 21) | OPC_CP1,
    OPC_BNZ_H = (0x1D << 21) | OPC_CP1,
    OPC_BNZ_W = (0x1E << 21) | OPC_CP1,
    OPC_BNZ_D = (0x1F << 21) | OPC_CP1,
};
#define MASK_CP1_FUNC(op) MASK_CP1(op) | (op & 0x3F)
#define MASK_BC1(op) MASK_CP1(op) | (op & (0x3 << 16))

enum {
    OPC_BC1F = (0x00 << 16) | OPC_BC1,
    OPC_BC1T = (0x01 << 16) | OPC_BC1,
    OPC_BC1FL = (0x02 << 16) | OPC_BC1,
    OPC_BC1TL = (0x03 << 16) | OPC_BC1,
};

enum {
    OPC_BC1FANY2 = (0x00 << 16) | OPC_BC1ANY2,
    OPC_BC1TANY2 = (0x01 << 16) | OPC_BC1ANY2,
};

enum {
    OPC_BC1FANY4 = (0x00 << 16) | OPC_BC1ANY4,
    OPC_BC1TANY4 = (0x01 << 16) | OPC_BC1ANY4,
};
#define MASK_CP2(op) MASK_OP_MAJOR(op) | (op & (0x1F << 21))

enum {
    OPC_MFC2 = (0x00 << 21) | OPC_CP2,
    OPC_DMFC2 = (0x01 << 21) | OPC_CP2,
    OPC_CFC2 = (0x02 << 21) | OPC_CP2,
    OPC_MFHC2 = (0x03 << 21) | OPC_CP2,
    OPC_MTC2 = (0x04 << 21) | OPC_CP2,
    OPC_DMTC2 = (0x05 << 21) | OPC_CP2,
    OPC_CTC2 = (0x06 << 21) | OPC_CP2,
    OPC_MTHC2 = (0x07 << 21) | OPC_CP2,
    OPC_BC2 = (0x08 << 21) | OPC_CP2,
    OPC_BC2EQZ = (0x09 << 21) | OPC_CP2,
    OPC_BC2NEZ = (0x0D << 21) | OPC_CP2,
};
#define MASK_LMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))

enum {
    OPC_PADDSH = (24 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSH = (25 << 21) | (0x00) | OPC_CP2,
    OPC_PADDH = (26 << 21) | (0x00) | OPC_CP2,
    OPC_PADDW = (27 << 21) | (0x00) | OPC_CP2,
    OPC_PADDSB = (28 << 21) | (0x00) | OPC_CP2,
    OPC_PADDUSB = (29 << 21) | (0x00) | OPC_CP2,
    OPC_PADDB = (30 << 21) | (0x00) | OPC_CP2,
    OPC_PADDD = (31 << 21) | (0x00) | OPC_CP2,

    OPC_PSUBSH = (24 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSH = (25 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBH = (26 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBW = (27 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBSB = (28 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBUSB = (29 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBB = (30 << 21) | (0x01) | OPC_CP2,
    OPC_PSUBD = (31 << 21) | (0x01) | OPC_CP2,

    OPC_PSHUFH = (24 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSWH = (25 << 21) | (0x02) | OPC_CP2,
    OPC_PACKSSHB = (26 << 21) | (0x02) | OPC_CP2,
    OPC_PACKUSHB = (27 << 21) | (0x02) | OPC_CP2,
    OPC_XOR_CP2 = (28 << 21) | (0x02) | OPC_CP2,
    OPC_NOR_CP2 = (29 << 21) | (0x02) | OPC_CP2,
    OPC_AND_CP2 = (30 << 21) | (0x02) | OPC_CP2,
    OPC_PANDN = (31 << 21) | (0x02) | OPC_CP2,

    OPC_PUNPCKLHW = (24 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHHW = (25 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKLBH = (26 << 21) | (0x03) | OPC_CP2,
    OPC_PUNPCKHBH = (27 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_0 = (28 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_1 = (29 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_2 = (30 << 21) | (0x03) | OPC_CP2,
    OPC_PINSRH_3 = (31 << 21) | (0x03) | OPC_CP2,

    OPC_PAVGH = (24 << 21) | (0x08) | OPC_CP2,
    OPC_PAVGB = (25 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXSH = (26 << 21) | (0x08) | OPC_CP2,
    OPC_PMINSH = (27 << 21) | (0x08) | OPC_CP2,
    OPC_PMAXUB = (28 << 21) | (0x08) | OPC_CP2,
    OPC_PMINUB = (29 << 21) | (0x08) | OPC_CP2,

    OPC_PCMPEQW = (24 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTW = (25 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQH = (26 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTH = (27 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPEQB = (28 << 21) | (0x09) | OPC_CP2,
    OPC_PCMPGTB = (29 << 21) | (0x09) | OPC_CP2,

    OPC_PSLLW = (24 << 21) | (0x0A) | OPC_CP2,
    OPC_PSLLH = (25 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULLH = (26 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHH = (27 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULUW = (28 << 21) | (0x0A) | OPC_CP2,
    OPC_PMULHUH = (29 << 21) | (0x0A) | OPC_CP2,

    OPC_PSRLW = (24 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRLH = (25 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAW = (26 << 21) | (0x0B) | OPC_CP2,
    OPC_PSRAH = (27 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKLWD = (28 << 21) | (0x0B) | OPC_CP2,
    OPC_PUNPCKHWD = (29 << 21) | (0x0B) | OPC_CP2,

    OPC_ADDU_CP2 = (24 << 21) | (0x0C) | OPC_CP2,
    OPC_OR_CP2 = (25 << 21) | (0x0C) | OPC_CP2,
    OPC_ADD_CP2 = (26 << 21) | (0x0C) | OPC_CP2,
    OPC_DADD_CP2 = (27 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQU_CP2 = (28 << 21) | (0x0C) | OPC_CP2,
    OPC_SEQ_CP2 = (29 << 21) | (0x0C) | OPC_CP2,

    OPC_SUBU_CP2 = (24 << 21) | (0x0D) | OPC_CP2,
    OPC_PASUBUB = (25 << 21) | (0x0D) | OPC_CP2,
    OPC_SUB_CP2 = (26 << 21) | (0x0D) | OPC_CP2,
    OPC_DSUB_CP2 = (27 << 21) | (0x0D) | OPC_CP2,
    OPC_SLTU_CP2 = (28 << 21) | (0x0D) | OPC_CP2,
    OPC_SLT_CP2 = (29 << 21) | (0x0D) | OPC_CP2,

    OPC_SLL_CP2 = (24 << 21) | (0x0E) | OPC_CP2,
    OPC_DSLL_CP2 = (25 << 21) | (0x0E) | OPC_CP2,
    OPC_PEXTRH = (26 << 21) | (0x0E) | OPC_CP2,
    OPC_PMADDHW = (27 << 21) | (0x0E) | OPC_CP2,
    OPC_SLEU_CP2 = (28 << 21) | (0x0E) | OPC_CP2,
    OPC_SLE_CP2 = (29 << 21) | (0x0E) | OPC_CP2,

    OPC_SRL_CP2 = (24 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRL_CP2 = (25 << 21) | (0x0F) | OPC_CP2,
    OPC_SRA_CP2 = (26 << 21) | (0x0F) | OPC_CP2,
    OPC_DSRA_CP2 = (27 << 21) | (0x0F) | OPC_CP2,
    OPC_BIADD = (28 << 21) | (0x0F) | OPC_CP2,
    OPC_PMOVMSKB = (29 << 21) | (0x0F) | OPC_CP2,
};
#define MASK_CP3(op) MASK_OP_MAJOR(op) | (op & 0x3F)

enum {
    OPC_LWXC1 = 0x00 | OPC_CP3,
    OPC_LDXC1 = 0x01 | OPC_CP3,
    OPC_LUXC1 = 0x05 | OPC_CP3,
    OPC_SWXC1 = 0x08 | OPC_CP3,
    OPC_SDXC1 = 0x09 | OPC_CP3,
    OPC_SUXC1 = 0x0D | OPC_CP3,
    OPC_PREFX = 0x0F | OPC_CP3,
    OPC_ALNV_PS = 0x1E | OPC_CP3,
    OPC_MADD_S = 0x20 | OPC_CP3,
    OPC_MADD_D = 0x21 | OPC_CP3,
    OPC_MADD_PS = 0x26 | OPC_CP3,
    OPC_MSUB_S = 0x28 | OPC_CP3,
    OPC_MSUB_D = 0x29 | OPC_CP3,
    OPC_MSUB_PS = 0x2E | OPC_CP3,
    OPC_NMADD_S = 0x30 | OPC_CP3,
    OPC_NMADD_D = 0x31 | OPC_CP3,
    OPC_NMADD_PS = 0x36 | OPC_CP3,
    OPC_NMSUB_S = 0x38 | OPC_CP3,
    OPC_NMSUB_D = 0x39 | OPC_CP3,
    OPC_NMSUB_PS = 0x3E | OPC_CP3,
};
#define MASK_MSA_MINOR(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
enum {
    OPC_MSA_I8_00 = 0x00 | OPC_MSA,
    OPC_MSA_I8_01 = 0x01 | OPC_MSA,
    OPC_MSA_I8_02 = 0x02 | OPC_MSA,
    OPC_MSA_I5_06 = 0x06 | OPC_MSA,
    OPC_MSA_I5_07 = 0x07 | OPC_MSA,
    OPC_MSA_BIT_09 = 0x09 | OPC_MSA,
    OPC_MSA_BIT_0A = 0x0A | OPC_MSA,
    OPC_MSA_3R_0D = 0x0D | OPC_MSA,
    OPC_MSA_3R_0E = 0x0E | OPC_MSA,
    OPC_MSA_3R_0F = 0x0F | OPC_MSA,
    OPC_MSA_3R_10 = 0x10 | OPC_MSA,
    OPC_MSA_3R_11 = 0x11 | OPC_MSA,
    OPC_MSA_3R_12 = 0x12 | OPC_MSA,
    OPC_MSA_3R_13 = 0x13 | OPC_MSA,
    OPC_MSA_3R_14 = 0x14 | OPC_MSA,
    OPC_MSA_3R_15 = 0x15 | OPC_MSA,
    OPC_MSA_ELM = 0x19 | OPC_MSA,
    OPC_MSA_3RF_1A = 0x1A | OPC_MSA,
    OPC_MSA_3RF_1B = 0x1B | OPC_MSA,
    OPC_MSA_3RF_1C = 0x1C | OPC_MSA,
    OPC_MSA_VEC = 0x1E | OPC_MSA,

    /* MI10 instruction */
    OPC_LD_B = (0x20) | OPC_MSA,
    OPC_LD_H = (0x21) | OPC_MSA,
    OPC_LD_W = (0x22) | OPC_MSA,
    OPC_LD_D = (0x23) | OPC_MSA,
    OPC_ST_B = (0x24) | OPC_MSA,
    OPC_ST_H = (0x25) | OPC_MSA,
    OPC_ST_W = (0x26) | OPC_MSA,
    OPC_ST_D = (0x27) | OPC_MSA,

    /* I5 instruction df(bits 22..21) = _b, _h, _w, _d */
    OPC_ADDVI_df = (0x0 << 23) | OPC_MSA_I5_06,
    OPC_CEQI_df = (0x0 << 23) | OPC_MSA_I5_07,
    OPC_SUBVI_df = (0x1 << 23) | OPC_MSA_I5_06,
    OPC_MAXI_S_df = (0x2 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_S_df = (0x2 << 23) | OPC_MSA_I5_07,
    OPC_MAXI_U_df = (0x3 << 23) | OPC_MSA_I5_06,
    OPC_CLTI_U_df = (0x3 << 23) | OPC_MSA_I5_07,
    OPC_MINI_S_df = (0x4 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_S_df = (0x4 << 23) | OPC_MSA_I5_07,
    OPC_MINI_U_df = (0x5 << 23) | OPC_MSA_I5_06,
    OPC_CLEI_U_df = (0x5 << 23) | OPC_MSA_I5_07,
    OPC_LDI_df = (0x6 << 23) | OPC_MSA_I5_07,

    /* I8 instruction */
    OPC_ANDI_B = (0x0 << 24) | OPC_MSA_I8_00,
    OPC_BMNZI_B = (0x0 << 24) | OPC_MSA_I8_01,
    OPC_SHF_B = (0x0 << 24) | OPC_MSA_I8_02,
    OPC_ORI_B = (0x1 << 24) | OPC_MSA_I8_00,
    OPC_BMZI_B = (0x1 << 24) | OPC_MSA_I8_01,
    OPC_SHF_H = (0x1 << 24) | OPC_MSA_I8_02,
    OPC_NORI_B = (0x2 << 24) | OPC_MSA_I8_00,
    OPC_BSELI_B = (0x2 << 24) | OPC_MSA_I8_01,
    OPC_SHF_W = (0x2 << 24) | OPC_MSA_I8_02,
    OPC_XORI_B = (0x3 << 24) | OPC_MSA_I8_00,

    /* VEC/2R/2RF instruction */
    OPC_AND_V = (0x00 << 21) | OPC_MSA_VEC,
    OPC_OR_V = (0x01 << 21) | OPC_MSA_VEC,
    OPC_NOR_V = (0x02 << 21) | OPC_MSA_VEC,
    OPC_XOR_V = (0x03 << 21) | OPC_MSA_VEC,
    OPC_BMNZ_V = (0x04 << 21) | OPC_MSA_VEC,
    OPC_BMZ_V = (0x05 << 21) | OPC_MSA_VEC,
    OPC_BSEL_V = (0x06 << 21) | OPC_MSA_VEC,

    OPC_MSA_2R = (0x18 << 21) | OPC_MSA_VEC,
    OPC_MSA_2RF = (0x19 << 21) | OPC_MSA_VEC,

    /* 2R instruction df(bits 17..16) = _b, _h, _w, _d */
    OPC_FILL_df = (0x00 << 18) | OPC_MSA_2R,
    OPC_PCNT_df = (0x01 << 18) | OPC_MSA_2R,
    OPC_NLOC_df = (0x02 << 18) | OPC_MSA_2R,
    OPC_NLZC_df = (0x03 << 18) | OPC_MSA_2R,

    /* 2RF instruction df(bit 16) = _w, _d */
    OPC_FCLASS_df = (0x00 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_S_df = (0x01 << 17) | OPC_MSA_2RF,
    OPC_FTRUNC_U_df = (0x02 << 17) | OPC_MSA_2RF,
    OPC_FSQRT_df = (0x03 << 17) | OPC_MSA_2RF,
    OPC_FRSQRT_df = (0x04 << 17) | OPC_MSA_2RF,
    OPC_FRCP_df = (0x05 << 17) | OPC_MSA_2RF,
    OPC_FRINT_df = (0x06 << 17) | OPC_MSA_2RF,
    OPC_FLOG2_df = (0x07 << 17) | OPC_MSA_2RF,
    OPC_FEXUPL_df = (0x08 << 17) | OPC_MSA_2RF,
    OPC_FEXUPR_df = (0x09 << 17) | OPC_MSA_2RF,
    OPC_FFQL_df = (0x0A << 17) | OPC_MSA_2RF,
    OPC_FFQR_df = (0x0B << 17) | OPC_MSA_2RF,
    OPC_FTINT_S_df = (0x0C << 17) | OPC_MSA_2RF,
    OPC_FTINT_U_df = (0x0D << 17) | OPC_MSA_2RF,
    OPC_FFINT_S_df = (0x0E << 17) | OPC_MSA_2RF,
    OPC_FFINT_U_df = (0x0F << 17) | OPC_MSA_2RF,

    /* 3R instruction df(bits 22..21) = _b, _h, _w, d */
    OPC_SLL_df = (0x0 << 23) | OPC_MSA_3R_0D,
    OPC_ADDV_df = (0x0 << 23) | OPC_MSA_3R_0E,
    OPC_CEQ_df = (0x0 << 23) | OPC_MSA_3R_0F,
    OPC_ADD_A_df = (0x0 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_S_df = (0x0 << 23) | OPC_MSA_3R_11,
    OPC_MULV_df = (0x0 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_S_df = (0x0 << 23) | OPC_MSA_3R_13,
    OPC_SLD_df = (0x0 << 23) | OPC_MSA_3R_14,
    OPC_VSHF_df = (0x0 << 23) | OPC_MSA_3R_15,
    OPC_SRA_df = (0x1 << 23) | OPC_MSA_3R_0D,
    OPC_SUBV_df = (0x1 << 23) | OPC_MSA_3R_0E,
    OPC_ADDS_A_df = (0x1 << 23) | OPC_MSA_3R_10,
    OPC_SUBS_U_df = (0x1 << 23) | OPC_MSA_3R_11,
    OPC_MADDV_df = (0x1 << 23) | OPC_MSA_3R_12,
    OPC_DOTP_U_df = (0x1 << 23) | OPC_MSA_3R_13,
    OPC_SPLAT_df = (0x1 << 23) | OPC_MSA_3R_14,
    OPC_SRAR_df = (0x1 << 23) | OPC_MSA_3R_15,
    OPC_SRL_df = (0x2 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_S_df = (0x2 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_S_df = (0x2 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_S_df = (0x2 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUS_U_df = (0x2 << 23) | OPC_MSA_3R_11,
    OPC_MSUBV_df = (0x2 << 23) | OPC_MSA_3R_12,
    OPC_DPADD_S_df = (0x2 << 23) | OPC_MSA_3R_13,
    OPC_PCKEV_df = (0x2 << 23) | OPC_MSA_3R_14,
    OPC_SRLR_df = (0x2 << 23) | OPC_MSA_3R_15,
    OPC_BCLR_df = (0x3 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_U_df = (0x3 << 23) | OPC_MSA_3R_0E,
    OPC_CLT_U_df = (0x3 << 23) | OPC_MSA_3R_0F,
    OPC_ADDS_U_df = (0x3 << 23) | OPC_MSA_3R_10,
    OPC_SUBSUU_S_df = (0x3 << 23) | OPC_MSA_3R_11,
    OPC_DPADD_U_df = (0x3 << 23) | OPC_MSA_3R_13,
    OPC_PCKOD_df = (0x3 << 23) | OPC_MSA_3R_14,
    OPC_BSET_df = (0x4 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_S_df = (0x4 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_S_df = (0x4 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_S_df = (0x4 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_S_df = (0x4 << 23) | OPC_MSA_3R_11,
    OPC_DIV_S_df = (0x4 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_S_df = (0x4 << 23) | OPC_MSA_3R_13,
    OPC_ILVL_df = (0x4 << 23) | OPC_MSA_3R_14,
    OPC_HADD_S_df = (0x4 << 23) | OPC_MSA_3R_15,
    OPC_BNEG_df = (0x5 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_U_df = (0x5 << 23) | OPC_MSA_3R_0E,
    OPC_CLE_U_df = (0x5 << 23) | OPC_MSA_3R_0F,
    OPC_AVE_U_df = (0x5 << 23) | OPC_MSA_3R_10,
    OPC_ASUB_U_df = (0x5 << 23) | OPC_MSA_3R_11,
    OPC_DIV_U_df = (0x5 << 23) | OPC_MSA_3R_12,
    OPC_DPSUB_U_df = (0x5 << 23) | OPC_MSA_3R_13,
    OPC_ILVR_df = (0x5 << 23) | OPC_MSA_3R_14,
    OPC_HADD_U_df = (0x5 << 23) | OPC_MSA_3R_15,
    OPC_BINSL_df = (0x6 << 23) | OPC_MSA_3R_0D,
    OPC_MAX_A_df = (0x6 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_S_df = (0x6 << 23) | OPC_MSA_3R_10,
    OPC_MOD_S_df = (0x6 << 23) | OPC_MSA_3R_12,
    OPC_ILVEV_df = (0x6 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_S_df = (0x6 << 23) | OPC_MSA_3R_15,
    OPC_BINSR_df = (0x7 << 23) | OPC_MSA_3R_0D,
    OPC_MIN_A_df = (0x7 << 23) | OPC_MSA_3R_0E,
    OPC_AVER_U_df = (0x7 << 23) | OPC_MSA_3R_10,
    OPC_MOD_U_df = (0x7 << 23) | OPC_MSA_3R_12,
    OPC_ILVOD_df = (0x7 << 23) | OPC_MSA_3R_14,
    OPC_HSUB_U_df = (0x7 << 23) | OPC_MSA_3R_15,

    /* ELM instructions df(bits 21..16) = _b, _h, _w, _d */
    OPC_SLDI_df = (0x0 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CTCMSA = (0x0 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_SPLATI_df = (0x1 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_CFCMSA = (0x1 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_S_df = (0x2 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_MOVE_V = (0x2 << 22) | (0x3E << 16) | OPC_MSA_ELM,
    OPC_COPY_U_df = (0x3 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSERT_df = (0x4 << 22) | (0x00 << 16) | OPC_MSA_ELM,
    OPC_INSVE_df = (0x5 << 22) | (0x00 << 16) | OPC_MSA_ELM,

    /* 3RF instruction _df(bit 21) = _w, _d */
    OPC_FCAF_df = (0x0 << 22) | OPC_MSA_3RF_1A,
    OPC_FADD_df = (0x0 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUN_df = (0x1 << 22) | OPC_MSA_3RF_1A,
    OPC_FSUB_df = (0x1 << 22) | OPC_MSA_3RF_1B,
    OPC_FCOR_df = (0x1 << 22) | OPC_MSA_3RF_1C,
    OPC_FCEQ_df = (0x2 << 22) | OPC_MSA_3RF_1A,
    OPC_FMUL_df = (0x2 << 22) | OPC_MSA_3RF_1B,
    OPC_FCUNE_df = (0x2 << 22) | OPC_MSA_3RF_1C,
    OPC_FCUEQ_df = (0x3 << 22) | OPC_MSA_3RF_1A,
    OPC_FDIV_df = (0x3 << 22) | OPC_MSA_3RF_1B,
    OPC_FCNE_df = (0x3 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLT_df = (0x4 << 22) | OPC_MSA_3RF_1A,
    OPC_FMADD_df = (0x4 << 22) | OPC_MSA_3RF_1B,
    OPC_MUL_Q_df = (0x4 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULT_df = (0x5 << 22) | OPC_MSA_3RF_1A,
    OPC_FMSUB_df = (0x5 << 22) | OPC_MSA_3RF_1B,
    OPC_MADD_Q_df = (0x5 << 22) | OPC_MSA_3RF_1C,
    OPC_FCLE_df = (0x6 << 22) | OPC_MSA_3RF_1A,
    OPC_MSUB_Q_df = (0x6 << 22) | OPC_MSA_3RF_1C,
    OPC_FCULE_df = (0x7 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXP2_df = (0x7 << 22) | OPC_MSA_3RF_1B,
    OPC_FSAF_df = (0x8 << 22) | OPC_MSA_3RF_1A,
    OPC_FEXDO_df = (0x8 << 22) | OPC_MSA_3RF_1B,
    OPC_FSUN_df = (0x9 << 22) | OPC_MSA_3RF_1A,
    OPC_FSOR_df = (0x9 << 22) | OPC_MSA_3RF_1C,
    OPC_FSEQ_df = (0xA << 22) | OPC_MSA_3RF_1A,
    OPC_FTQ_df = (0xA << 22) | OPC_MSA_3RF_1B,
    OPC_FSUNE_df = (0xA << 22) | OPC_MSA_3RF_1C,
    OPC_FSUEQ_df = (0xB << 22) | OPC_MSA_3RF_1A,
    OPC_FSNE_df = (0xB << 22) | OPC_MSA_3RF_1C,
    OPC_FSLT_df = (0xC << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_df = (0xC << 22) | OPC_MSA_3RF_1B,
    OPC_MULR_Q_df = (0xC << 22) | OPC_MSA_3RF_1C,
    OPC_FSULT_df = (0xD << 22) | OPC_MSA_3RF_1A,
    OPC_FMIN_A_df = (0xD << 22) | OPC_MSA_3RF_1B,
    OPC_MADDR_Q_df = (0xD << 22) | OPC_MSA_3RF_1C,
    OPC_FSLE_df = (0xE << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_df = (0xE << 22) | OPC_MSA_3RF_1B,
    OPC_MSUBR_Q_df = (0xE << 22) | OPC_MSA_3RF_1C,
    OPC_FSULE_df = (0xF << 22) | OPC_MSA_3RF_1A,
    OPC_FMAX_A_df = (0xF << 22) | OPC_MSA_3RF_1B,

    /* BIT instruction df(bits 22..16) = _B _H _W _D */
    OPC_SLLI_df = (0x0 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_S_df = (0x0 << 23) | OPC_MSA_BIT_0A,
    OPC_SRAI_df = (0x1 << 23) | OPC_MSA_BIT_09,
    OPC_SAT_U_df = (0x1 << 23) | OPC_MSA_BIT_0A,
    OPC_SRLI_df = (0x2 << 23) | OPC_MSA_BIT_09,
    OPC_SRARI_df = (0x2 << 23) | OPC_MSA_BIT_0A,
    OPC_BCLRI_df = (0x3 << 23) | OPC_MSA_BIT_09,
    OPC_SRLRI_df = (0x3 << 23) | OPC_MSA_BIT_0A,
    OPC_BSETI_df = (0x4 << 23) | OPC_MSA_BIT_09,
    OPC_BNEGI_df = (0x5 << 23) | OPC_MSA_BIT_09,
    OPC_BINSLI_df = (0x6 << 23) | OPC_MSA_BIT_09,
    OPC_BINSRI_df = (0x7 << 23) | OPC_MSA_BIT_09,
};
/* global register indices */
static TCGv_env cpu_env;
static TCGv cpu_gpr[32], cpu_PC;
static TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget, bcond;
static TCGv_i32 hflags;
static TCGv_i32 fpu_fcr0, fpu_fcr31;
static TCGv_i64 fpu_f64[32];
static TCGv_i64 msa_wr_d[64];

#include "exec/gen-icount.h"
#define gen_helper_0e0i(name, arg) do {                      \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                \
    gen_helper_##name(cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                           \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);               \
    gen_helper_##name(cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                           \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);               \
    gen_helper_##name(ret, cpu_env, helper_tmp);             \
    tcg_temp_free_i32(helper_tmp);                           \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {          \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);               \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);       \
    tcg_temp_free_i32(helper_tmp);                           \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);               \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                           \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);               \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp); \
    tcg_temp_free_i32(helper_tmp);                           \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {   \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);               \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp); \
    tcg_temp_free_i32(helper_tmp);                           \
    } while (0)
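/*
 * The gen_helper_*e*i wrappers above box a compile-time immediate into a
 * temporary TCGv_i32, pass it (together with cpu_env and any register
 * operands) to the named helper, and then release the temporary.
 */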
typedef struct DisasContext {
    struct TranslationBlock *tb;
    target_ulong pc, saved_pc;
    int singlestep_enabled;
    int32_t CP0_Config1;
    /* Routine used to access memory */
    TCGMemOp default_tcg_memop_mask;
    uint32_t hflags, saved_hflags;
    int bstate;
    target_ulong btarget;
    int CP0_LLAddr_shift;
} DisasContext;
enum {
    BS_NONE = 0,   /* We go out of the TB without reaching a branch or an
                    * exception condition */
    BS_STOP = 1,   /* We want to stop translation for any reason */
    BS_BRANCH = 2, /* We reached a branch condition */
    BS_EXCP = 3,   /* We reached an exception condition */
};
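/*
 * ctx->bstate is set to one of the BS_* values above so the translation loop
 * knows how the current block ended: fell through, stopped, took a branch,
 * or raised an exception.
 */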
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

static const char * const msaregnames[] = {
    "w0.d0",  "w0.d1",  "w1.d0",  "w1.d1",
    "w2.d0",  "w2.d1",  "w3.d0",  "w3.d1",
    "w4.d0",  "w4.d1",  "w5.d0",  "w5.d1",
    "w6.d0",  "w6.d1",  "w7.d0",  "w7.d1",
    "w8.d0",  "w8.d1",  "w9.d0",  "w9.d1",
    "w10.d0", "w10.d1", "w11.d0", "w11.d1",
    "w12.d0", "w12.d1", "w13.d0", "w13.d1",
    "w14.d0", "w14.d1", "w15.d0", "w15.d1",
    "w16.d0", "w16.d1", "w17.d0", "w17.d1",
    "w18.d0", "w18.d1", "w19.d0", "w19.d1",
    "w20.d0", "w20.d1", "w21.d0", "w21.d1",
    "w22.d0", "w22.d1", "w23.d0", "w23.d1",
    "w24.d0", "w24.d1", "w25.d0", "w25.d1",
    "w26.d0", "w26.d1", "w27.d0", "w27.d1",
    "w28.d0", "w28.d1", "w29.d0", "w29.d1",
    "w30.d0", "w30.d1", "w31.d0", "w31.d1",
};
#define LOG_DISAS(...)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__);                 \
        }                                                                     \
    } while (0)

#define MIPS_INVAL(op)                                                        \
    do {                                                                      \
        if (MIPS_DEBUG_DISAS) {                                               \
            qemu_log_mask(CPU_LOG_TB_IN_ASM,                                  \
                          TARGET_FMT_lx ": %08x Invalid %s %03x %03x %03x\n", \
                          ctx->pc, ctx->opcode, op, ctx->opcode >> 26,        \
                          ctx->opcode & 0x3F, ((ctx->opcode >> 16) & 0x1F));  \
        }                                                                     \
    } while (0)
/* General purpose registers moves. */
static inline void gen_load_gpr (TCGv t, int reg)
{
    if (reg == 0)
        tcg_gen_movi_tl(t, 0);
    else
        tcg_gen_mov_tl(t, cpu_gpr[reg]);
}

static inline void gen_store_gpr (TCGv t, int reg)
{
    if (reg != 0)
        tcg_gen_mov_tl(cpu_gpr[reg], t);
}
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr (int from, int to)
{
    TCGv t0 = tcg_temp_new();

    if (from == 0)
        tcg_gen_movi_tl(t0, 0);
    else {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
    }
    gen_store_gpr(t0, to);
    tcg_temp_free(t0);
}

static inline void gen_store_srsgpr (int from, int to)
{
    if (to != 0) {
        TCGv t0 = tcg_temp_new();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(t0, from);
        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
        tcg_temp_free(t0);
    }
}
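
/*
 * Shadow register sets: the PSS field of CP0_SRSCtl selects the previous
 * shadow set.  The code above scales that set number by
 * 32 * sizeof(target_ulong) to form a byte offset, adds it to cpu_env, and
 * then indexes the selected set by the register number, so a shadow GPR is
 * reached with a plain load/store relative to the CPU state pointer.
 */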
static inline void gen_save_pc(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_PC, pc);
}

static inline void save_cpu_state(DisasContext *ctx, int do_save_pc)
{
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->pc != ctx->saved_pc) {
        gen_save_pc(ctx->pc);
        ctx->saved_pc = ctx->pc;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        tcg_gen_movi_i32(hflags, ctx->hflags);
        ctx->saved_hflags = ctx->hflags;
        switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_BR:
            break;
        case MIPS_HFLAG_BC:
        case MIPS_HFLAG_BL:
        case MIPS_HFLAG_B:
            tcg_gen_movi_tl(btarget, ctx->btarget);
            break;
        }
    }
}

static inline void restore_cpu_state(CPUMIPSState *env, DisasContext *ctx)
{
    ctx->saved_hflags = ctx->hflags;
    switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        ctx->btarget = env->btarget;
        break;
    }
}
static inline void generate_exception_err(DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(cpu_env, texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
    ctx->bstate = BS_EXCP;
}

static inline void generate_exception(DisasContext *ctx, int excp)
{
    gen_helper_0e0i(raise_exception, excp);
}

static inline void generate_exception_end(DisasContext *ctx, int excp)
{
    generate_exception_err(ctx, excp, 0);
}
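
/*
 * generate_exception() just calls the raise_exception helper;
 * generate_exception_end() goes through generate_exception_err() with
 * err = 0, which first flushes PC and hflags via save_cpu_state() and then
 * marks the block state as BS_EXCP so translation of the current TB stops
 * once the exception has been raised.
 */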
/* Floating point register moves. */
static void gen_load_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    tcg_gen_extrl_i64_i32(t, fpu_f64[reg]);
}

static void gen_store_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    TCGv_i64 t64;
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        generate_exception(ctx, EXCP_RI);
    }
    t64 = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(t64, t);
    tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 0, 32);
    tcg_temp_free_i64(t64);
}

static void gen_load_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_extrh_i64_i32(t, fpu_f64[reg]);
    } else {
        gen_load_fpr32(ctx, t, reg | 1);
    }
}

static void gen_store_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        TCGv_i64 t64 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t64, t);
        tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 32, 32);
        tcg_temp_free_i64(t64);
    } else {
        gen_store_fpr32(ctx, t, reg | 1);
    }
}

static void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(t, fpu_f64[reg]);
    } else {
        tcg_gen_concat32_i64(t, fpu_f64[reg & ~1], fpu_f64[reg | 1]);
    }
}

static void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(fpu_f64[reg], t);
    } else {
        TCGv_i64 t0;
        tcg_gen_deposit_i64(fpu_f64[reg & ~1], fpu_f64[reg & ~1], t, 0, 32);
        t0 = tcg_temp_new_i64();
        tcg_gen_shri_i64(t0, t, 32);
        tcg_gen_deposit_i64(fpu_f64[reg | 1], fpu_f64[reg | 1], t0, 0, 32);
        tcg_temp_free_i64(t0);
    }
}
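
/*
 * FPR access depends on the FR mode: with MIPS_HFLAG_F64 each fpu_f64[]
 * element is a full 64-bit register, while in 32-bit FPR mode a 64-bit
 * value is split across an even/odd register pair and only the low 32 bits
 * of each underlying element are used, which is why the stores above go
 * through tcg_gen_deposit_i64() on bits [31:0].
 */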
static inline int get_fp_bit (int cc)
{
    if (cc)
        return 24 + cc;
    else
        return 23;
}
/* Addresses computation */
static inline void gen_op_addr_add (DisasContext *ctx, TCGv ret, TCGv arg0, TCGv arg1)
{
    tcg_gen_add_tl(ret, arg0, arg1);

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        tcg_gen_ext32s_i64(ret, ret);
    }
#endif
}

/* Addresses computation (translation time) */
static target_long addr_add(DisasContext *ctx, target_long base,
                            target_long offset)
{
    target_long sum = base + offset;

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        sum = (int32_t)sum;
    }
#endif
    return sum;
}
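
/*
 * With MIPS_HFLAG_AWRAP set on a 64-bit target, computed addresses are
 * wrapped back to sign-extended 32-bit values: gen_op_addr_add() does this
 * in the generated code, and addr_add() mirrors it at translation time for
 * addresses the translator folds itself.
 */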
/* Sign-extract the low 32-bits to a target_long. */
static inline void gen_move_low32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_ext32s_i64(ret, arg);
#else
    tcg_gen_extrl_i64_i32(ret, arg);
#endif
}

/* Sign-extract the high 32-bits to a target_long. */
static inline void gen_move_high32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_sari_i64(ret, arg, 32);
#else
    tcg_gen_extrh_i64_i32(ret, arg);
#endif
}
static inline void check_cp0_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0)))
        generate_exception_err(ctx, EXCP_CpU, 0);
}

static inline void check_cp1_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU)))
        generate_exception_err(ctx, EXCP_CpU, 1);
}

/* Verify that the processor is running with COP1X instructions enabled.
   This is associated with the nabla symbol in the MIPS32 and MIPS64
   opcode tables.  */
static inline void check_cop1x(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/* Verify that the processor is running with 64-bit floating-point
   operations enabled.  */
static inline void check_cp1_64bitmode(DisasContext *ctx)
{
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X)))
        generate_exception_end(ctx, EXCP_RI);
}

/*
 * Verify if floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
 */
static inline void check_cp1_registers(DisasContext *ctx, int regs)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1)))
        generate_exception_end(ctx, EXCP_RI);
}

/* Verify that the processor is running with DSP instructions enabled.
   This is enabled by CP0 Status register MX(24) bit.
 */
static inline void check_dsp(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}

static inline void check_dspr2(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSPR2))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            generate_exception_end(ctx, EXCP_RI);
        }
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU does not support the instruction set corresponding to flags. */
static inline void check_insn(DisasContext *ctx, int flags)
{
    if (unlikely(!(ctx->insn_flags & flags))) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU has corresponding flag set which indicates that the instruction
   has been removed. */
static inline void check_insn_opc_removed(DisasContext *ctx, int flags)
{
    if (unlikely(ctx->insn_flags & flags)) {
        generate_exception_end(ctx, EXCP_RI);
    }
}

/* This code generates a "reserved instruction" exception if the
   CPU does not support 64-bit paired-single (PS) floating point data type */
static inline void check_ps(DisasContext *ctx)
{
    if (unlikely(!ctx->ps)) {
        generate_exception(ctx, EXCP_RI);
    }
    check_cp1_64bitmode(ctx);
}

#ifdef TARGET_MIPS64
/* This code generates a "reserved instruction" exception if 64-bit
   instructions are not enabled. */
static inline void check_mips_64(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_64)))
        generate_exception_end(ctx, EXCP_RI);
}
#endif

#ifndef CONFIG_USER_ONLY
static inline void check_mvh(DisasContext *ctx)
{
    if (unlikely(!ctx->mvh)) {
        generate_exception(ctx, EXCP_RI);
    }
}
#endif
/* Define small wrappers for gen_load_fpr* so that we have a uniform
   calling interface for 32 and 64-bit FPRs.  No sense in changing
   all callers for gen_load_fpr32 when we need the CTX parameter for
   this one use.  */
#define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
#define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
#define FOP_CONDS(type, abs, fmt, ifmt, bits)                                 \
static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n,      \
                                               int ft, int fs, int cc)        \
{                                                                             \
    TCGv_i##bits fp0 = tcg_temp_new_i##bits ();                               \
    TCGv_i##bits fp1 = tcg_temp_new_i##bits ();                               \
    switch (ifmt) {                                                           \
    case FMT_PS:                                                              \
        check_ps(ctx);                                                        \
        break;                                                                \
    case FMT_D:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        check_cp1_registers(ctx, fs | ft);                                    \
        break;                                                                \
    case FMT_S:                                                               \
        if (abs) {                                                            \
            check_cop1x(ctx);                                                 \
        }                                                                     \
        break;                                                                \
    }                                                                         \
    gen_ldcmp_fpr##bits (ctx, fp0, fs);                                       \
    gen_ldcmp_fpr##bits (ctx, fp1, ft);                                       \
    switch (n) {                                                              \
    case 0: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc);    break;\
    case 1: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc);   break;\
    case 2: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc);   break;\
    case 3: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc);  break;\
    case 4: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc);  break;\
    case 5: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc);  break;\
    case 6: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc);  break;\
    case 7: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc);  break;\
    case 8: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc);   break;\
    case 9: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); break;\
    case 10: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); break;\
    case 11: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); break;\
    case 12: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc);  break;\
    case 13: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); break;\
    case 14: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc);  break;\
    case 15: gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); break;\
    default: abort();                                                         \
    }                                                                         \
    tcg_temp_free_i##bits (fp0);                                              \
    tcg_temp_free_i##bits (fp1);                                              \
}
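
/*
 * The sixteen cases above follow the c.cond.fmt condition encoding: n is
 * the 4-bit cond field, running f, un, eq, ueq, olt, ult, ole, ule, sf,
 * ngle, seq, ngl, lt, nge, le, ngt, and each case simply dispatches to the
 * matching comparison helper for the given format, passing the condition
 * code cc through as an immediate.
 */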
FOP_CONDS(, 0, d, FMT_D, 64)
FOP_CONDS(abs, 1, d, FMT_D, 64)
FOP_CONDS(, 0, s, FMT_S, 32)
FOP_CONDS(abs, 1, s, FMT_S, 32)
FOP_CONDS(, 0, ps, FMT_PS, 64)
FOP_CONDS(abs, 1, ps, FMT_PS, 64)
#define FOP_CONDNS(fmt, ifmt, bits, STORE)                                    \
static inline void gen_r6_cmp_ ## fmt(DisasContext * ctx, int n,              \
                                      int ft, int fs, int fd)                 \
{                                                                             \
    TCGv_i ## bits fp0 = tcg_temp_new_i ## bits();                            \
    TCGv_i ## bits fp1 = tcg_temp_new_i ## bits();                            \
    if (ifmt == FMT_D) {                                                      \
        check_cp1_registers(ctx, fs | ft | fd);                               \
    }                                                                         \
    gen_ldcmp_fpr ## bits(ctx, fp0, fs);                                      \
    gen_ldcmp_fpr ## bits(ctx, fp1, ft);                                      \
    switch (n) {                                                              \
    case  0: gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1);   break; \
    case  1: gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1);   break; \
    case  2: gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1);   break; \
    case  3: gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1);  break; \
    case  4: gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1);   break; \
    case  5: gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1);  break; \
    case  6: gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1);   break; \
    case  7: gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1);  break; \
    case  8: gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1);  break; \
    case  9: gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1);  break; \
    case 10: gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1);  break; \
    case 11: gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); break; \
    case 12: gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1);  break; \
    case 13: gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); break; \
    case 14: gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1);  break; \
    case 15: gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); break; \
    case 17: gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1);   break; \
    case 18: gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1);  break; \
    case 19: gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1);   break; \
    case 25: gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1);  break; \
    case 26: gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); break; \
    case 27: gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1);  break; \
    default: abort();                                                         \
    }                                                                         \
    STORE;                                                                    \
    tcg_temp_free_i ## bits (fp0);                                            \
    tcg_temp_free_i ## bits (fp1);                                            \
}
FOP_CONDNS(d, FMT_D, 64, gen_store_fpr64(ctx, fp0, fd))
FOP_CONDNS(s, FMT_S, 32, gen_store_fpr32(ctx, fp0, fd))

#undef gen_ldcmp_fpr32
#undef gen_ldcmp_fpr64
/* load/store instructions. */
#ifdef CONFIG_USER_ONLY
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)    \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    tcg_gen_mov_tl(t0, arg1);                                              \
    tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx);                         \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));            \
    tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval));            \
    tcg_temp_free(t0);                                                     \
}
#else
#define OP_LD_ATOMIC(insn,fname)                                           \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, DisasContext *ctx)    \
{                                                                          \
    gen_helper_1e1i(insn, ret, arg1, ctx->mem_idx);                        \
}
#endif
OP_LD_ATOMIC(ll,ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld,ld64);
#endif
#undef OP_LD_ATOMIC
#ifdef CONFIG_USER_ONLY
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                               \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                            \
    TCGv t0 = tcg_temp_new();                                                \
    TCGLabel *l1 = gen_new_label();                                          \
    TCGLabel *l2 = gen_new_label();                                          \
                                                                             \
    tcg_gen_andi_tl(t0, arg2, almask);                                       \
    tcg_gen_brcondi_tl(TCG_COND_EQ, t0, 0, l1);                              \
    tcg_gen_st_tl(arg2, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));      \
    generate_exception(ctx, EXCP_AdES);                                      \
    gen_set_label(l1);                                                       \
    tcg_gen_ld_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));              \
    tcg_gen_brcond_tl(TCG_COND_NE, arg2, t0, l2);                            \
    tcg_gen_movi_tl(t0, rt | ((almask << 3) & 0x20));                        \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, llreg));               \
    tcg_gen_st_tl(arg1, cpu_env, offsetof(CPUMIPSState, llnewval));          \
    generate_exception_end(ctx, EXCP_SC);                                    \
    gen_set_label(l2);                                                       \
    tcg_gen_movi_tl(t0, 0);                                                  \
    gen_store_gpr(t0, rt);                                                   \
    tcg_temp_free(t0);                                                       \
}
#else
#define OP_ST_ATOMIC(insn,fname,ldname,almask)                               \
static inline void op_st_##insn(TCGv arg1, TCGv arg2, int rt, DisasContext *ctx) \
{                                                                            \
    TCGv t0 = tcg_temp_new();                                                \
    gen_helper_1e2i(insn, t0, arg1, arg2, ctx->mem_idx);                     \
    gen_store_gpr(t0, rt);                                                   \
    tcg_temp_free(t0);                                                       \
}
#endif
OP_ST_ATOMIC(sc,st32,ld32s,0x3);
#if defined(TARGET_MIPS64)
OP_ST_ATOMIC(scd,st64,ld64,0x7);
#endif
#undef OP_ST_ATOMIC
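
/*
 * In the CONFIG_USER_ONLY build, SC/SCD are emulated without real atomics:
 * the store address is checked for alignment (EXCP_AdES on failure), then
 * compared with the lladdr recorded by the matching LL; on a match the
 * target register number (plus a width flag derived from almask), the new
 * value and the address are stashed in the CPU state and EXCP_SC is raised
 * so the store can be completed outside generated code, otherwise rt is
 * simply cleared to report failure.  The system-mode build defers the whole
 * operation to a helper instead.
 */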
static void gen_base_offset_addr (DisasContext *ctx, TCGv addr,
                                  int base, int16_t offset)
{
    if (base == 0) {
        tcg_gen_movi_tl(addr, offset);
    } else if (offset == 0) {
        gen_load_gpr(addr, base);
    } else {
        tcg_gen_movi_tl(addr, offset);
        gen_op_addr_add(ctx, addr, cpu_gpr[base], addr);
    }
}
static target_ulong pc_relative_pc (DisasContext *ctx)
{
    target_ulong pc = ctx->pc;

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int branch_bytes = ctx->hflags & MIPS_HFLAG_BDS16 ? 2 : 4;

        pc -= branch_bytes;
    }

    pc &= ~(target_ulong)3;
    return pc;
}
2121 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2122 int rt
, int base
, int16_t offset
)
2126 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
)) {
2127 /* Loongson CPU uses a load to zero register for prefetch.
2128 We emulate it as a NOP. On other CPU we must perform the
2129 actual memory access. */
2133 t0
= tcg_temp_new();
2134 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2137 #if defined(TARGET_MIPS64)
2139 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2140 ctx
->default_tcg_memop_mask
);
2141 gen_store_gpr(t0
, rt
);
2144 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2145 ctx
->default_tcg_memop_mask
);
2146 gen_store_gpr(t0
, rt
);
2150 op_ld_lld(t0
, t0
, ctx
);
2151 gen_store_gpr(t0
, rt
);
2154 t1
= tcg_temp_new();
2155 /* Do a byte access to possibly trigger a page
2156 fault with the unaligned address. */
2157 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2158 tcg_gen_andi_tl(t1
, t0
, 7);
2159 #ifndef TARGET_WORDS_BIGENDIAN
2160 tcg_gen_xori_tl(t1
, t1
, 7);
2162 tcg_gen_shli_tl(t1
, t1
, 3);
2163 tcg_gen_andi_tl(t0
, t0
, ~7);
2164 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2165 tcg_gen_shl_tl(t0
, t0
, t1
);
2166 t2
= tcg_const_tl(-1);
2167 tcg_gen_shl_tl(t2
, t2
, t1
);
2168 gen_load_gpr(t1
, rt
);
2169 tcg_gen_andc_tl(t1
, t1
, t2
);
2171 tcg_gen_or_tl(t0
, t0
, t1
);
2173 gen_store_gpr(t0
, rt
);
2176 t1
= tcg_temp_new();
2177 /* Do a byte access to possibly trigger a page
2178 fault with the unaligned address. */
2179 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2180 tcg_gen_andi_tl(t1
, t0
, 7);
2181 #ifdef TARGET_WORDS_BIGENDIAN
2182 tcg_gen_xori_tl(t1
, t1
, 7);
2184 tcg_gen_shli_tl(t1
, t1
, 3);
2185 tcg_gen_andi_tl(t0
, t0
, ~7);
2186 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2187 tcg_gen_shr_tl(t0
, t0
, t1
);
2188 tcg_gen_xori_tl(t1
, t1
, 63);
2189 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2190 tcg_gen_shl_tl(t2
, t2
, t1
);
2191 gen_load_gpr(t1
, rt
);
2192 tcg_gen_and_tl(t1
, t1
, t2
);
2194 tcg_gen_or_tl(t0
, t0
, t1
);
2196 gen_store_gpr(t0
, rt
);
2199 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2200 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2202 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
2203 gen_store_gpr(t0
, rt
);
2207 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2208 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2210 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
2211 gen_store_gpr(t0
, rt
);
2214 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
|
2215 ctx
->default_tcg_memop_mask
);
2216 gen_store_gpr(t0
, rt
);
2219 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
2220 ctx
->default_tcg_memop_mask
);
2221 gen_store_gpr(t0
, rt
);
2224 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUW
|
2225 ctx
->default_tcg_memop_mask
);
2226 gen_store_gpr(t0
, rt
);
2229 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_SB
);
2230 gen_store_gpr(t0
, rt
);
2233 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
2234 gen_store_gpr(t0
, rt
);
2237 t1
= tcg_temp_new();
2238 /* Do a byte access to possibly trigger a page
2239 fault with the unaligned address. */
2240 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2241 tcg_gen_andi_tl(t1
, t0
, 3);
2242 #ifndef TARGET_WORDS_BIGENDIAN
2243 tcg_gen_xori_tl(t1
, t1
, 3);
2245 tcg_gen_shli_tl(t1
, t1
, 3);
2246 tcg_gen_andi_tl(t0
, t0
, ~3);
2247 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2248 tcg_gen_shl_tl(t0
, t0
, t1
);
2249 t2
= tcg_const_tl(-1);
2250 tcg_gen_shl_tl(t2
, t2
, t1
);
2251 gen_load_gpr(t1
, rt
);
2252 tcg_gen_andc_tl(t1
, t1
, t2
);
2254 tcg_gen_or_tl(t0
, t0
, t1
);
2256 tcg_gen_ext32s_tl(t0
, t0
);
2257 gen_store_gpr(t0
, rt
);
2260 t1
= tcg_temp_new();
2261 /* Do a byte access to possibly trigger a page
2262 fault with the unaligned address. */
2263 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
2264 tcg_gen_andi_tl(t1
, t0
, 3);
2265 #ifdef TARGET_WORDS_BIGENDIAN
2266 tcg_gen_xori_tl(t1
, t1
, 3);
2268 tcg_gen_shli_tl(t1
, t1
, 3);
2269 tcg_gen_andi_tl(t0
, t0
, ~3);
2270 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
2271 tcg_gen_shr_tl(t0
, t0
, t1
);
2272 tcg_gen_xori_tl(t1
, t1
, 31);
2273 t2
= tcg_const_tl(0xfffffffeull
);
2274 tcg_gen_shl_tl(t2
, t2
, t1
);
2275 gen_load_gpr(t1
, rt
);
2276 tcg_gen_and_tl(t1
, t1
, t2
);
2278 tcg_gen_or_tl(t0
, t0
, t1
);
2280 tcg_gen_ext32s_tl(t0
, t0
);
2281 gen_store_gpr(t0
, rt
);
2285 op_ld_ll(t0
, t0
, ctx
);
2286 gen_store_gpr(t0
, rt
);
static void gen_st (DisasContext *ctx, uint32_t opc, int rt,
                    int base, int16_t offset)
{
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_base_offset_addr(ctx, t0, base, offset);
    gen_load_gpr(t1, rt);
    switch (opc) {
#if defined(TARGET_MIPS64)
    case OPC_SD:
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEQ |
                           ctx->default_tcg_memop_mask);
        break;
    case OPC_SDL:
        gen_helper_0e2i(sdl, t1, t0, ctx->mem_idx);
        break;
    case OPC_SDR:
        gen_helper_0e2i(sdr, t1, t0, ctx->mem_idx);
        break;
#endif
    case OPC_SW:
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL |
                           ctx->default_tcg_memop_mask);
        break;
    case OPC_SH:
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUW |
                           ctx->default_tcg_memop_mask);
        break;
    case OPC_SB:
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_8);
        break;
    case OPC_SWL:
        gen_helper_0e2i(swl, t1, t0, ctx->mem_idx);
        break;
    case OPC_SWR:
        gen_helper_0e2i(swr, t1, t0, ctx->mem_idx);
        break;
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
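
/*
 * Aligned stores are emitted inline with tcg_gen_qemu_st_tl() using the
 * context's default memop mask, while the left/right partial stores
 * (SWL/SWR and SDL/SDR) are routed through helpers, since they only modify
 * part of the addressed word.
 */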
/* Store conditional */
static void gen_st_cond (DisasContext *ctx, uint32_t opc, int rt,
                         int base, int16_t offset)
{
    TCGv t0, t1;

#ifdef CONFIG_USER_ONLY
    t0 = tcg_temp_local_new();
    t1 = tcg_temp_local_new();
#else
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
#endif
    gen_base_offset_addr(ctx, t0, base, offset);
    gen_load_gpr(t1, rt);
    switch (opc) {
#if defined(TARGET_MIPS64)
    case OPC_SCD:
    case R6_OPC_SCD:
        op_st_scd(t1, t0, rt, ctx);
        break;
#endif
    case OPC_SC:
    case R6_OPC_SC:
        op_st_sc(t1, t0, rt, ctx);
        break;
    }
    tcg_temp_free(t1);
    tcg_temp_free(t0);
}
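
/*
 * The user-only variant of op_st_sc/op_st_scd emits branches and may raise
 * an exception mid-sequence, so t0/t1 are allocated as local temporaries in
 * that configuration; ordinary TCG temporaries are not guaranteed to
 * survive across the generated branches.
 */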
2368 /* Load and store */
2369 static void gen_flt_ldst (DisasContext
*ctx
, uint32_t opc
, int ft
,
2370 int base
, int16_t offset
)
2372 TCGv t0
= tcg_temp_new();
2374 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2375 /* Don't do NOP if destination is zero: we must perform the actual
2380 TCGv_i32 fp0
= tcg_temp_new_i32();
2381 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
2382 ctx
->default_tcg_memop_mask
);
2383 gen_store_fpr32(ctx
, fp0
, ft
);
2384 tcg_temp_free_i32(fp0
);
2389 TCGv_i32 fp0
= tcg_temp_new_i32();
2390 gen_load_fpr32(ctx
, fp0
, ft
);
2391 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
2392 ctx
->default_tcg_memop_mask
);
2393 tcg_temp_free_i32(fp0
);
2398 TCGv_i64 fp0
= tcg_temp_new_i64();
2399 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2400 ctx
->default_tcg_memop_mask
);
2401 gen_store_fpr64(ctx
, fp0
, ft
);
2402 tcg_temp_free_i64(fp0
);
2407 TCGv_i64 fp0
= tcg_temp_new_i64();
2408 gen_load_fpr64(ctx
, fp0
, ft
);
2409 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
2410 ctx
->default_tcg_memop_mask
);
2411 tcg_temp_free_i64(fp0
);
2415 MIPS_INVAL("flt_ldst");
2416 generate_exception_end(ctx
, EXCP_RI
);
static void gen_cop1_ldst(DisasContext *ctx, uint32_t op, int rt,
                          int rs, int16_t imm)
{
    if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
        check_cp1_enabled(ctx);
        switch (op) {
        case OPC_LDC1:
        case OPC_SDC1:
            check_insn(ctx, ISA_MIPS2);
            /* Fallthrough */
        default:
            gen_flt_ldst(ctx, op, rt, rs, imm);
        }
    } else {
        generate_exception_err(ctx, EXCP_CpU, 1);
    }
}
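
/*
 * CP1 loads and stores are only emitted when CP0_Config1.FP reports an FPU
 * and coprocessor 1 is currently enabled; otherwise a coprocessor-unusable
 * exception for CP1 is raised.  The ISA_MIPS2 check above guards the 64-bit
 * LDC1/SDC1 forms, which did not exist before MIPS II.
 */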
2441 /* Arithmetic with immediate operand */
2442 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
2443 int rt
, int rs
, int16_t imm
)
2445 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
2447 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
2448 /* If no destination, treat it as a NOP.
2449 For addi, we must generate the overflow exception when needed. */
2455 TCGv t0
= tcg_temp_local_new();
2456 TCGv t1
= tcg_temp_new();
2457 TCGv t2
= tcg_temp_new();
2458 TCGLabel
*l1
= gen_new_label();
2460 gen_load_gpr(t1
, rs
);
2461 tcg_gen_addi_tl(t0
, t1
, uimm
);
2462 tcg_gen_ext32s_tl(t0
, t0
);
2464 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2465 tcg_gen_xori_tl(t2
, t0
, uimm
);
2466 tcg_gen_and_tl(t1
, t1
, t2
);
2468 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2470 /* operands of same sign, result different sign */
2471 generate_exception(ctx
, EXCP_OVERFLOW
);
2473 tcg_gen_ext32s_tl(t0
, t0
);
2474 gen_store_gpr(t0
, rt
);
2480 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2481 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2483 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2486 #if defined(TARGET_MIPS64)
2489 TCGv t0
= tcg_temp_local_new();
2490 TCGv t1
= tcg_temp_new();
2491 TCGv t2
= tcg_temp_new();
2492 TCGLabel
*l1
= gen_new_label();
2494 gen_load_gpr(t1
, rs
);
2495 tcg_gen_addi_tl(t0
, t1
, uimm
);
2497 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
2498 tcg_gen_xori_tl(t2
, t0
, uimm
);
2499 tcg_gen_and_tl(t1
, t1
, t2
);
2501 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2503 /* operands of same sign, result different sign */
2504 generate_exception(ctx
, EXCP_OVERFLOW
);
2506 gen_store_gpr(t0
, rt
);
2512 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2514 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2521 /* Logic with immediate operand */
2522 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
2523 int rt
, int rs
, int16_t imm
)
2528 /* If no destination, treat it as a NOP. */
2531 uimm
= (uint16_t)imm
;
2534 if (likely(rs
!= 0))
2535 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2537 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
2541 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2543 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2546 if (likely(rs
!= 0))
2547 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
2549 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
2552 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS32R6
)) {
2554 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
2555 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
2557 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
/* Set on less than with immediate operand */
static void gen_slt_imm(DisasContext *ctx, uint32_t opc,
                        int rt, int rs, int16_t imm)
{
    target_ulong uimm = (target_long)imm; /* Sign extend to 32/64 bits */
    TCGv t0;

    if (rt == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    switch (opc) {
    case OPC_SLTI:
        tcg_gen_setcondi_tl(TCG_COND_LT, cpu_gpr[rt], t0, uimm);
        break;
    case OPC_SLTIU:
        tcg_gen_setcondi_tl(TCG_COND_LTU, cpu_gpr[rt], t0, uimm);
        break;
    }
    tcg_temp_free(t0);
}
2590 /* Shifts with immediate operand */
2591 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
2592 int rt
, int rs
, int16_t imm
)
2594 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
2598 /* If no destination, treat it as a NOP. */
2602 t0
= tcg_temp_new();
2603 gen_load_gpr(t0
, rs
);
2606 tcg_gen_shli_tl(t0
, t0
, uimm
);
2607 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2610 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2614 tcg_gen_ext32u_tl(t0
, t0
);
2615 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2617 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2622 TCGv_i32 t1
= tcg_temp_new_i32();
2624 tcg_gen_trunc_tl_i32(t1
, t0
);
2625 tcg_gen_rotri_i32(t1
, t1
, uimm
);
2626 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
2627 tcg_temp_free_i32(t1
);
2629 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
2632 #if defined(TARGET_MIPS64)
2634 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
2637 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
2640 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
2644 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
2646 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
2650 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2653 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2656 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2659 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
2667 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
2668 int rd
, int rs
, int rt
)
2670 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
2671 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
2672 /* If no destination, treat it as a NOP.
2673 For add & sub, we must generate the overflow exception when needed. */
2680 TCGv t0
= tcg_temp_local_new();
2681 TCGv t1
= tcg_temp_new();
2682 TCGv t2
= tcg_temp_new();
2683 TCGLabel
*l1
= gen_new_label();
2685 gen_load_gpr(t1
, rs
);
2686 gen_load_gpr(t2
, rt
);
2687 tcg_gen_add_tl(t0
, t1
, t2
);
2688 tcg_gen_ext32s_tl(t0
, t0
);
2689 tcg_gen_xor_tl(t1
, t1
, t2
);
2690 tcg_gen_xor_tl(t2
, t0
, t2
);
2691 tcg_gen_andc_tl(t1
, t2
, t1
);
2693 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2695 /* operands of same sign, result different sign */
2696 generate_exception(ctx
, EXCP_OVERFLOW
);
2698 gen_store_gpr(t0
, rd
);
2703 if (rs
!= 0 && rt
!= 0) {
2704 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2705 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2706 } else if (rs
== 0 && rt
!= 0) {
2707 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2708 } else if (rs
!= 0 && rt
== 0) {
2709 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2711 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2716 TCGv t0
= tcg_temp_local_new();
2717 TCGv t1
= tcg_temp_new();
2718 TCGv t2
= tcg_temp_new();
2719 TCGLabel
*l1
= gen_new_label();
2721 gen_load_gpr(t1
, rs
);
2722 gen_load_gpr(t2
, rt
);
2723 tcg_gen_sub_tl(t0
, t1
, t2
);
2724 tcg_gen_ext32s_tl(t0
, t0
);
2725 tcg_gen_xor_tl(t2
, t1
, t2
);
2726 tcg_gen_xor_tl(t1
, t0
, t1
);
2727 tcg_gen_and_tl(t1
, t1
, t2
);
2729 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2731 /* operands of different sign, first operand and result different sign */
2732 generate_exception(ctx
, EXCP_OVERFLOW
);
2734 gen_store_gpr(t0
, rd
);
2739 if (rs
!= 0 && rt
!= 0) {
2740 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2741 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2742 } else if (rs
== 0 && rt
!= 0) {
2743 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2744 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2745 } else if (rs
!= 0 && rt
== 0) {
2746 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2748 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2751 #if defined(TARGET_MIPS64)
2754 TCGv t0
= tcg_temp_local_new();
2755 TCGv t1
= tcg_temp_new();
2756 TCGv t2
= tcg_temp_new();
2757 TCGLabel
*l1
= gen_new_label();
2759 gen_load_gpr(t1
, rs
);
2760 gen_load_gpr(t2
, rt
);
2761 tcg_gen_add_tl(t0
, t1
, t2
);
2762 tcg_gen_xor_tl(t1
, t1
, t2
);
2763 tcg_gen_xor_tl(t2
, t0
, t2
);
2764 tcg_gen_andc_tl(t1
, t2
, t1
);
2766 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2768 /* operands of same sign, result different sign */
2769 generate_exception(ctx
, EXCP_OVERFLOW
);
2771 gen_store_gpr(t0
, rd
);
2776 if (rs
!= 0 && rt
!= 0) {
2777 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2778 } else if (rs
== 0 && rt
!= 0) {
2779 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2780 } else if (rs
!= 0 && rt
== 0) {
2781 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2783 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2788 TCGv t0
= tcg_temp_local_new();
2789 TCGv t1
= tcg_temp_new();
2790 TCGv t2
= tcg_temp_new();
2791 TCGLabel
*l1
= gen_new_label();
2793 gen_load_gpr(t1
, rs
);
2794 gen_load_gpr(t2
, rt
);
2795 tcg_gen_sub_tl(t0
, t1
, t2
);
2796 tcg_gen_xor_tl(t2
, t1
, t2
);
2797 tcg_gen_xor_tl(t1
, t0
, t1
);
2798 tcg_gen_and_tl(t1
, t1
, t2
);
2800 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
2802 /* operands of different sign, first operand and result different sign */
2803 generate_exception(ctx
, EXCP_OVERFLOW
);
2805 gen_store_gpr(t0
, rd
);
2810 if (rs
!= 0 && rt
!= 0) {
2811 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2812 } else if (rs
== 0 && rt
!= 0) {
2813 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2814 } else if (rs
!= 0 && rt
== 0) {
2815 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2817 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2822 if (likely(rs
!= 0 && rt
!= 0)) {
2823 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2824 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
2826 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
/* Conditional move */
static void gen_cond_move(DisasContext *ctx, uint32_t opc,
                          int rd, int rs, int rt)
{
    TCGv t0, t1, t2;

    if (rd == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
    t1 = tcg_const_tl(0);
    t2 = tcg_temp_new();
    gen_load_gpr(t2, rs);
    switch (opc) {
    case OPC_MOVN:
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rd], t0, t1, t2, cpu_gpr[rd]);
        break;
    case OPC_MOVZ:
        tcg_gen_movcond_tl(TCG_COND_EQ, cpu_gpr[rd], t0, t1, t2, cpu_gpr[rd]);
        break;
    case OPC_SELNEZ:
        tcg_gen_movcond_tl(TCG_COND_NE, cpu_gpr[rd], t0, t1, t2, t1);
        break;
    case OPC_SELEQZ:
        tcg_gen_movcond_tl(TCG_COND_EQ, cpu_gpr[rd], t0, t1, t2, t1);
        break;
    }
    tcg_temp_free(t2);
    tcg_temp_free(t1);
    tcg_temp_free(t0);
}
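
/*
 * All four conditional moves compare rt against zero with movcond; the
 * difference is the value used when the condition fails: MOVN/MOVZ keep the
 * previous contents of rd, while the R6 selects SELNEZ/SELEQZ write zero
 * (the t1 constant) instead.
 */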
2868 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
2869 int rd
, int rs
, int rt
)
2872 /* If no destination, treat it as a NOP. */
2878 if (likely(rs
!= 0 && rt
!= 0)) {
2879 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2881 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2885 if (rs
!= 0 && rt
!= 0) {
2886 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2887 } else if (rs
== 0 && rt
!= 0) {
2888 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2889 } else if (rs
!= 0 && rt
== 0) {
2890 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2892 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
2896 if (likely(rs
!= 0 && rt
!= 0)) {
2897 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2898 } else if (rs
== 0 && rt
!= 0) {
2899 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2900 } else if (rs
!= 0 && rt
== 0) {
2901 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2903 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
2907 if (likely(rs
!= 0 && rt
!= 0)) {
2908 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
2909 } else if (rs
== 0 && rt
!= 0) {
2910 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
2911 } else if (rs
!= 0 && rt
== 0) {
2912 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
2914 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
/* Set on lower than */
static void gen_slt(DisasContext *ctx, uint32_t opc,
                    int rd, int rs, int rt)
{
    TCGv t0, t1;

    if (rd == 0) {
        /* If no destination, treat it as a NOP. */
        return;
    }

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    switch (opc) {
    case OPC_SLT:
        tcg_gen_setcond_tl(TCG_COND_LT, cpu_gpr[rd], t0, t1);
        break;
    case OPC_SLTU:
        tcg_gen_setcond_tl(TCG_COND_LTU, cpu_gpr[rd], t0, t1);
        break;
    }
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
2948 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
2949 int rd
, int rs
, int rt
)
2954 /* If no destination, treat it as a NOP.
2955 For add & sub, we must generate the overflow exception when needed. */
2959 t0
= tcg_temp_new();
2960 t1
= tcg_temp_new();
2961 gen_load_gpr(t0
, rs
);
2962 gen_load_gpr(t1
, rt
);
2965 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2966 tcg_gen_shl_tl(t0
, t1
, t0
);
2967 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2970 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2971 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
2974 tcg_gen_ext32u_tl(t1
, t1
);
2975 tcg_gen_andi_tl(t0
, t0
, 0x1f);
2976 tcg_gen_shr_tl(t0
, t1
, t0
);
2977 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
2981 TCGv_i32 t2
= tcg_temp_new_i32();
2982 TCGv_i32 t3
= tcg_temp_new_i32();
2984 tcg_gen_trunc_tl_i32(t2
, t0
);
2985 tcg_gen_trunc_tl_i32(t3
, t1
);
2986 tcg_gen_andi_i32(t2
, t2
, 0x1f);
2987 tcg_gen_rotr_i32(t2
, t3
, t2
);
2988 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
2989 tcg_temp_free_i32(t2
);
2990 tcg_temp_free_i32(t3
);
2993 #if defined(TARGET_MIPS64)
2995 tcg_gen_andi_tl(t0
, t0
, 0x3f);
2996 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
2999 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3000 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3003 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3004 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
3007 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3008 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
3016 /* Arithmetic on HI/LO registers */
3017 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3019 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3030 #if defined(TARGET_MIPS64)
3032 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3036 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3040 #if defined(TARGET_MIPS64)
3042 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3046 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3051 #if defined(TARGET_MIPS64)
3053 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3057 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3060 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3065 #if defined(TARGET_MIPS64)
3067 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3071 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3074 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3080 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3083 TCGv t0
= tcg_const_tl(addr
);
3084 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3085 gen_store_gpr(t0
, reg
);
3089 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
3095 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
3098 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3099 addr
= addr_add(ctx
, pc
, offset
);
3100 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3104 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3105 addr
= addr_add(ctx
, pc
, offset
);
3106 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
3108 #if defined(TARGET_MIPS64)
3111 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
3112 addr
= addr_add(ctx
, pc
, offset
);
3113 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
3117 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
3120 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3121 addr
= addr_add(ctx
, pc
, offset
);
3122 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3127 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
3128 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
3129 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
3132 #if defined(TARGET_MIPS64)
3133 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
3134 case R6_OPC_LDPC
+ (1 << 16):
3135 case R6_OPC_LDPC
+ (2 << 16):
3136 case R6_OPC_LDPC
+ (3 << 16):
3138 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
3139 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
3140 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
3144 MIPS_INVAL("OPC_PCREL");
3145 generate_exception_end(ctx
, EXCP_RI
);
3152 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
3161 t0
= tcg_temp_new();
3162 t1
= tcg_temp_new();
3164 gen_load_gpr(t0
, rs
);
3165 gen_load_gpr(t1
, rt
);
3170 TCGv t2
= tcg_temp_new();
3171 TCGv t3
= tcg_temp_new();
3172 tcg_gen_ext32s_tl(t0
, t0
);
3173 tcg_gen_ext32s_tl(t1
, t1
);
3174 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3175 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3176 tcg_gen_and_tl(t2
, t2
, t3
);
3177 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3178 tcg_gen_or_tl(t2
, t2
, t3
);
3179 tcg_gen_movi_tl(t3
, 0);
3180 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3181 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3182 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3189 TCGv t2
= tcg_temp_new();
3190 TCGv t3
= tcg_temp_new();
3191 tcg_gen_ext32s_tl(t0
, t0
);
3192 tcg_gen_ext32s_tl(t1
, t1
);
3193 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3194 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3195 tcg_gen_and_tl(t2
, t2
, t3
);
3196 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3197 tcg_gen_or_tl(t2
, t2
, t3
);
3198 tcg_gen_movi_tl(t3
, 0);
3199 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3200 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3201 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3208 TCGv t2
= tcg_const_tl(0);
3209 TCGv t3
= tcg_const_tl(1);
3210 tcg_gen_ext32u_tl(t0
, t0
);
3211 tcg_gen_ext32u_tl(t1
, t1
);
3212 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3213 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3214 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3221 TCGv t2
= tcg_const_tl(0);
3222 TCGv t3
= tcg_const_tl(1);
3223 tcg_gen_ext32u_tl(t0
, t0
);
3224 tcg_gen_ext32u_tl(t1
, t1
);
3225 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3226 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3227 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3234 TCGv_i32 t2
= tcg_temp_new_i32();
3235 TCGv_i32 t3
= tcg_temp_new_i32();
3236 tcg_gen_trunc_tl_i32(t2
, t0
);
3237 tcg_gen_trunc_tl_i32(t3
, t1
);
3238 tcg_gen_mul_i32(t2
, t2
, t3
);
3239 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3240 tcg_temp_free_i32(t2
);
3241 tcg_temp_free_i32(t3
);
3246 TCGv_i32 t2
= tcg_temp_new_i32();
3247 TCGv_i32 t3
= tcg_temp_new_i32();
3248 tcg_gen_trunc_tl_i32(t2
, t0
);
3249 tcg_gen_trunc_tl_i32(t3
, t1
);
3250 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3251 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3252 tcg_temp_free_i32(t2
);
3253 tcg_temp_free_i32(t3
);
3258 TCGv_i32 t2
= tcg_temp_new_i32();
3259 TCGv_i32 t3
= tcg_temp_new_i32();
3260 tcg_gen_trunc_tl_i32(t2
, t0
);
3261 tcg_gen_trunc_tl_i32(t3
, t1
);
3262 tcg_gen_mul_i32(t2
, t2
, t3
);
3263 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3264 tcg_temp_free_i32(t2
);
3265 tcg_temp_free_i32(t3
);
3270 TCGv_i32 t2
= tcg_temp_new_i32();
3271 TCGv_i32 t3
= tcg_temp_new_i32();
3272 tcg_gen_trunc_tl_i32(t2
, t0
);
3273 tcg_gen_trunc_tl_i32(t3
, t1
);
3274 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3275 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
3276 tcg_temp_free_i32(t2
);
3277 tcg_temp_free_i32(t3
);
3280 #if defined(TARGET_MIPS64)
3283 TCGv t2
= tcg_temp_new();
3284 TCGv t3
= tcg_temp_new();
3285 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3286 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3287 tcg_gen_and_tl(t2
, t2
, t3
);
3288 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3289 tcg_gen_or_tl(t2
, t2
, t3
);
3290 tcg_gen_movi_tl(t3
, 0);
3291 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3292 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3299 TCGv t2
= tcg_temp_new();
3300 TCGv t3
= tcg_temp_new();
3301 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3302 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3303 tcg_gen_and_tl(t2
, t2
, t3
);
3304 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3305 tcg_gen_or_tl(t2
, t2
, t3
);
3306 tcg_gen_movi_tl(t3
, 0);
3307 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3308 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3315 TCGv t2
= tcg_const_tl(0);
3316 TCGv t3
= tcg_const_tl(1);
3317 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3318 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
3325 TCGv t2
= tcg_const_tl(0);
3326 TCGv t3
= tcg_const_tl(1);
3327 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3328 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
3334 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3338 TCGv t2
= tcg_temp_new();
3339 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3344 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
3348 TCGv t2
= tcg_temp_new();
3349 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
3355 MIPS_INVAL("r6 mul/div");
3356 generate_exception_end(ctx
, EXCP_RI
);
3364 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
3365 int acc
, int rs
, int rt
)
3369 t0
= tcg_temp_new();
3370 t1
= tcg_temp_new();
3372 gen_load_gpr(t0
, rs
);
3373 gen_load_gpr(t1
, rt
);
3382 TCGv t2
= tcg_temp_new();
3383 TCGv t3
= tcg_temp_new();
3384 tcg_gen_ext32s_tl(t0
, t0
);
3385 tcg_gen_ext32s_tl(t1
, t1
);
3386 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
3387 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
3388 tcg_gen_and_tl(t2
, t2
, t3
);
3389 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3390 tcg_gen_or_tl(t2
, t2
, t3
);
3391 tcg_gen_movi_tl(t3
, 0);
3392 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3393 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3394 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3395 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3396 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3403 TCGv t2
= tcg_const_tl(0);
3404 TCGv t3
= tcg_const_tl(1);
3405 tcg_gen_ext32u_tl(t0
, t0
);
3406 tcg_gen_ext32u_tl(t1
, t1
);
3407 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3408 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
3409 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
3410 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
3411 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
3418 TCGv_i32 t2
= tcg_temp_new_i32();
3419 TCGv_i32 t3
= tcg_temp_new_i32();
3420 tcg_gen_trunc_tl_i32(t2
, t0
);
3421 tcg_gen_trunc_tl_i32(t3
, t1
);
3422 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
3423 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3424 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3425 tcg_temp_free_i32(t2
);
3426 tcg_temp_free_i32(t3
);
3431 TCGv_i32 t2
= tcg_temp_new_i32();
3432 TCGv_i32 t3
= tcg_temp_new_i32();
3433 tcg_gen_trunc_tl_i32(t2
, t0
);
3434 tcg_gen_trunc_tl_i32(t3
, t1
);
3435 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
3436 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
3437 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
3438 tcg_temp_free_i32(t2
);
3439 tcg_temp_free_i32(t3
);
3442 #if defined(TARGET_MIPS64)
3445 TCGv t2
= tcg_temp_new();
3446 TCGv t3
= tcg_temp_new();
3447 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
3448 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
3449 tcg_gen_and_tl(t2
, t2
, t3
);
3450 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
3451 tcg_gen_or_tl(t2
, t2
, t3
);
3452 tcg_gen_movi_tl(t3
, 0);
3453 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
3454 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
3455 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
3462 TCGv t2
= tcg_const_tl(0);
3463 TCGv t3
= tcg_const_tl(1);
3464 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
3465 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
3466 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
3472 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3475 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
3480 TCGv_i64 t2
= tcg_temp_new_i64();
3481 TCGv_i64 t3
= tcg_temp_new_i64();
3483 tcg_gen_ext_tl_i64(t2
, t0
);
3484 tcg_gen_ext_tl_i64(t3
, t1
);
3485 tcg_gen_mul_i64(t2
, t2
, t3
);
3486 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3487 tcg_gen_add_i64(t2
, t2
, t3
);
3488 tcg_temp_free_i64(t3
);
3489 gen_move_low32(cpu_LO
[acc
], t2
);
3490 gen_move_high32(cpu_HI
[acc
], t2
);
3491 tcg_temp_free_i64(t2
);
3496 TCGv_i64 t2
= tcg_temp_new_i64();
3497 TCGv_i64 t3
= tcg_temp_new_i64();
3499 tcg_gen_ext32u_tl(t0
, t0
);
3500 tcg_gen_ext32u_tl(t1
, t1
);
3501 tcg_gen_extu_tl_i64(t2
, t0
);
3502 tcg_gen_extu_tl_i64(t3
, t1
);
3503 tcg_gen_mul_i64(t2
, t2
, t3
);
3504 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3505 tcg_gen_add_i64(t2
, t2
, t3
);
3506 tcg_temp_free_i64(t3
);
3507 gen_move_low32(cpu_LO
[acc
], t2
);
3508 gen_move_high32(cpu_HI
[acc
], t2
);
3509 tcg_temp_free_i64(t2
);
3514 TCGv_i64 t2
= tcg_temp_new_i64();
3515 TCGv_i64 t3
= tcg_temp_new_i64();
3517 tcg_gen_ext_tl_i64(t2
, t0
);
3518 tcg_gen_ext_tl_i64(t3
, t1
);
3519 tcg_gen_mul_i64(t2
, t2
, t3
);
3520 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3521 tcg_gen_sub_i64(t2
, t3
, t2
);
3522 tcg_temp_free_i64(t3
);
3523 gen_move_low32(cpu_LO
[acc
], t2
);
3524 gen_move_high32(cpu_HI
[acc
], t2
);
3525 tcg_temp_free_i64(t2
);
3530 TCGv_i64 t2
= tcg_temp_new_i64();
3531 TCGv_i64 t3
= tcg_temp_new_i64();
3533 tcg_gen_ext32u_tl(t0
, t0
);
3534 tcg_gen_ext32u_tl(t1
, t1
);
3535 tcg_gen_extu_tl_i64(t2
, t0
);
3536 tcg_gen_extu_tl_i64(t3
, t1
);
3537 tcg_gen_mul_i64(t2
, t2
, t3
);
3538 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
3539 tcg_gen_sub_i64(t2
, t3
, t2
);
3540 tcg_temp_free_i64(t3
);
3541 gen_move_low32(cpu_LO
[acc
], t2
);
3542 gen_move_high32(cpu_HI
[acc
], t2
);
3543 tcg_temp_free_i64(t2
);
3547 MIPS_INVAL("mul/div");
3548 generate_exception_end(ctx
, EXCP_RI
);
3556 static void gen_mul_vr54xx (DisasContext
*ctx
, uint32_t opc
,
3557 int rd
, int rs
, int rt
)
3559 TCGv t0
= tcg_temp_new();
3560 TCGv t1
= tcg_temp_new();
3562 gen_load_gpr(t0
, rs
);
3563 gen_load_gpr(t1
, rt
);
3566 case OPC_VR54XX_MULS
:
3567 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
3569 case OPC_VR54XX_MULSU
:
3570 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
3572 case OPC_VR54XX_MACC
:
3573 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
3575 case OPC_VR54XX_MACCU
:
3576 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
3578 case OPC_VR54XX_MSAC
:
3579 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
3581 case OPC_VR54XX_MSACU
:
3582 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
3584 case OPC_VR54XX_MULHI
:
3585 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
3587 case OPC_VR54XX_MULHIU
:
3588 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
3590 case OPC_VR54XX_MULSHI
:
3591 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
3593 case OPC_VR54XX_MULSHIU
:
3594 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
3596 case OPC_VR54XX_MACCHI
:
3597 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
3599 case OPC_VR54XX_MACCHIU
:
3600 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
3602 case OPC_VR54XX_MSACHI
:
3603 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
3605 case OPC_VR54XX_MSACHIU
:
3606 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
3609 MIPS_INVAL("mul vr54xx");
3610 generate_exception_end(ctx
, EXCP_RI
);
3613 gen_store_gpr(t0
, rd
);
static void gen_cl (DisasContext *ctx, uint32_t opc,
                    int rd, int rs)
{
    TCGv t0;

    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    switch (opc) {
    case OPC_CLO:
    case R6_OPC_CLO:
        gen_helper_clo(cpu_gpr[rd], t0);
        break;
    case OPC_CLZ:
    case R6_OPC_CLZ:
        gen_helper_clz(cpu_gpr[rd], t0);
        break;
#if defined(TARGET_MIPS64)
    case OPC_DCLO:
    case R6_OPC_DCLO:
        gen_helper_dclo(cpu_gpr[rd], t0);
        break;
    case OPC_DCLZ:
    case R6_OPC_DCLZ:
        gen_helper_dclz(cpu_gpr[rd], t0);
        break;
#endif
    }
    tcg_temp_free(t0);
}
3654 /* Godson integer instructions */
3655 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
3656 int rd
, int rs
, int rt
)
3668 case OPC_MULTU_G_2E
:
3669 case OPC_MULTU_G_2F
:
3670 #if defined(TARGET_MIPS64)
3671 case OPC_DMULT_G_2E
:
3672 case OPC_DMULT_G_2F
:
3673 case OPC_DMULTU_G_2E
:
3674 case OPC_DMULTU_G_2F
:
3676 t0
= tcg_temp_new();
3677 t1
= tcg_temp_new();
3680 t0
= tcg_temp_local_new();
3681 t1
= tcg_temp_local_new();
3685 gen_load_gpr(t0
, rs
);
3686 gen_load_gpr(t1
, rt
);
3691 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3692 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3694 case OPC_MULTU_G_2E
:
3695 case OPC_MULTU_G_2F
:
3696 tcg_gen_ext32u_tl(t0
, t0
);
3697 tcg_gen_ext32u_tl(t1
, t1
);
3698 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3699 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3704 TCGLabel
*l1
= gen_new_label();
3705 TCGLabel
*l2
= gen_new_label();
3706 TCGLabel
*l3
= gen_new_label();
3707 tcg_gen_ext32s_tl(t0
, t0
);
3708 tcg_gen_ext32s_tl(t1
, t1
);
3709 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3710 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3713 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3714 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3715 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3718 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3719 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3726 TCGLabel
*l1
= gen_new_label();
3727 TCGLabel
*l2
= gen_new_label();
3728 tcg_gen_ext32u_tl(t0
, t0
);
3729 tcg_gen_ext32u_tl(t1
, t1
);
3730 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3731 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3734 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3735 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3742 TCGLabel
*l1
= gen_new_label();
3743 TCGLabel
*l2
= gen_new_label();
3744 TCGLabel
*l3
= gen_new_label();
3745 tcg_gen_ext32u_tl(t0
, t0
);
3746 tcg_gen_ext32u_tl(t1
, t1
);
3747 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3748 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
3749 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
3751 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3754 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3755 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3762 TCGLabel
*l1
= gen_new_label();
3763 TCGLabel
*l2
= gen_new_label();
3764 tcg_gen_ext32u_tl(t0
, t0
);
3765 tcg_gen_ext32u_tl(t1
, t1
);
3766 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3767 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3770 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3771 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3775 #if defined(TARGET_MIPS64)
3776 case OPC_DMULT_G_2E
:
3777 case OPC_DMULT_G_2F
:
3778 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3780 case OPC_DMULTU_G_2E
:
3781 case OPC_DMULTU_G_2F
:
3782 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
3787 TCGLabel
*l1
= gen_new_label();
3788 TCGLabel
*l2
= gen_new_label();
3789 TCGLabel
*l3
= gen_new_label();
3790 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3791 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3794 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3795 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3796 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
3799 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
3803 case OPC_DDIVU_G_2E
:
3804 case OPC_DDIVU_G_2F
:
3806 TCGLabel
*l1
= gen_new_label();
3807 TCGLabel
*l2
= gen_new_label();
3808 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3809 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3812 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
3819 TCGLabel
*l1
= gen_new_label();
3820 TCGLabel
*l2
= gen_new_label();
3821 TCGLabel
*l3
= gen_new_label();
3822 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
3823 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
3824 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
3826 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3829 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
3833 case OPC_DMODU_G_2E
:
3834 case OPC_DMODU_G_2F
:
3836 TCGLabel
*l1
= gen_new_label();
3837 TCGLabel
*l2
= gen_new_label();
3838 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
3839 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3842 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
3853 /* Loongson multimedia instructions */
3854 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
3856 uint32_t opc
, shift_max
;
3859 opc
= MASK_LMI(ctx
->opcode
);
3865 t0
= tcg_temp_local_new_i64();
3866 t1
= tcg_temp_local_new_i64();
3869 t0
= tcg_temp_new_i64();
3870 t1
= tcg_temp_new_i64();
3874 gen_load_fpr64(ctx
, t0
, rs
);
3875 gen_load_fpr64(ctx
, t1
, rt
);
3877 #define LMI_HELPER(UP, LO) \
3878 case OPC_##UP: gen_helper_##LO(t0, t0, t1); break
3879 #define LMI_HELPER_1(UP, LO) \
3880 case OPC_##UP: gen_helper_##LO(t0, t0); break
3881 #define LMI_DIRECT(UP, LO, OP) \
3882 case OPC_##UP: tcg_gen_##OP##_i64(t0, t0, t1); break
    LMI_HELPER(PADDSH, paddsh);
    LMI_HELPER(PADDUSH, paddush);
    LMI_HELPER(PADDH, paddh);
    LMI_HELPER(PADDW, paddw);
    LMI_HELPER(PADDSB, paddsb);
    LMI_HELPER(PADDUSB, paddusb);
    LMI_HELPER(PADDB, paddb);

    LMI_HELPER(PSUBSH, psubsh);
    LMI_HELPER(PSUBUSH, psubush);
    LMI_HELPER(PSUBH, psubh);
    LMI_HELPER(PSUBW, psubw);
    LMI_HELPER(PSUBSB, psubsb);
    LMI_HELPER(PSUBUSB, psubusb);
    LMI_HELPER(PSUBB, psubb);

    LMI_HELPER(PSHUFH, pshufh);
    LMI_HELPER(PACKSSWH, packsswh);
    LMI_HELPER(PACKSSHB, packsshb);
    LMI_HELPER(PACKUSHB, packushb);

    LMI_HELPER(PUNPCKLHW, punpcklhw);
    LMI_HELPER(PUNPCKHHW, punpckhhw);
    LMI_HELPER(PUNPCKLBH, punpcklbh);
    LMI_HELPER(PUNPCKHBH, punpckhbh);
    LMI_HELPER(PUNPCKLWD, punpcklwd);
    LMI_HELPER(PUNPCKHWD, punpckhwd);

    LMI_HELPER(PAVGH, pavgh);
    LMI_HELPER(PAVGB, pavgb);
    LMI_HELPER(PMAXSH, pmaxsh);
    LMI_HELPER(PMINSH, pminsh);
    LMI_HELPER(PMAXUB, pmaxub);
    LMI_HELPER(PMINUB, pminub);

    LMI_HELPER(PCMPEQW, pcmpeqw);
    LMI_HELPER(PCMPGTW, pcmpgtw);
    LMI_HELPER(PCMPEQH, pcmpeqh);
    LMI_HELPER(PCMPGTH, pcmpgth);
    LMI_HELPER(PCMPEQB, pcmpeqb);
    LMI_HELPER(PCMPGTB, pcmpgtb);

    LMI_HELPER(PSLLW, psllw);
    LMI_HELPER(PSLLH, psllh);
    LMI_HELPER(PSRLW, psrlw);
    LMI_HELPER(PSRLH, psrlh);
    LMI_HELPER(PSRAW, psraw);
    LMI_HELPER(PSRAH, psrah);

    LMI_HELPER(PMULLH, pmullh);
    LMI_HELPER(PMULHH, pmulhh);
    LMI_HELPER(PMULHUH, pmulhuh);
    LMI_HELPER(PMADDHW, pmaddhw);

    LMI_HELPER(PASUBUB, pasubub);
    LMI_HELPER_1(BIADD, biadd);
    LMI_HELPER_1(PMOVMSKB, pmovmskb);

    LMI_DIRECT(PADDD, paddd, add);
    LMI_DIRECT(PSUBD, psubd, sub);
    LMI_DIRECT(XOR_CP2, xor, xor);
    LMI_DIRECT(NOR_CP2, nor, nor);
    LMI_DIRECT(AND_CP2, and, and);
    LMI_DIRECT(PANDN, pandn, andc);
    LMI_DIRECT(OR, or, or);
        tcg_gen_deposit_i64(t0, t0, t1, 0, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 16, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 32, 16);
        tcg_gen_deposit_i64(t0, t0, t1, 48, 16);

        tcg_gen_andi_i64(t1, t1, 3);
        tcg_gen_shli_i64(t1, t1, 4);
        tcg_gen_shr_i64(t0, t0, t1);
        tcg_gen_ext16u_i64(t0, t0);

        tcg_gen_add_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);
        tcg_gen_sub_i64(t0, t0, t1);
        tcg_gen_ext32s_i64(t0, t0);

        /* Make sure shift count isn't TCG undefined behaviour.  */
        tcg_gen_andi_i64(t1, t1, shift_max - 1);

        tcg_gen_shl_i64(t0, t0, t1);

        /* Since SRA is UndefinedResult without sign-extended inputs,
           we can treat SRA and DSRA the same.  */
        tcg_gen_sar_i64(t0, t0, t1);

        /* We want to shift in zeros for SRL; zero-extend first.  */
        tcg_gen_ext32u_i64(t0, t0);
        tcg_gen_shr_i64(t0, t0, t1);

        if (shift_max == 32) {
            tcg_gen_ext32s_i64(t0, t0);

        /* Shifts larger than MAX produce zero.  */
        tcg_gen_setcondi_i64(TCG_COND_LTU, t1, t1, shift_max);
        tcg_gen_neg_i64(t1, t1);
        tcg_gen_and_i64(t0, t0, t1);
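        /*
         * setcond produces 1 when the shift count is in range and 0
         * otherwise; negating that gives an all-ones or all-zero mask,
         * so the AND above zeroes the result for out-of-range counts
         * without needing a branch.
         */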
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGLabel *lab = gen_new_label();

        tcg_gen_mov_i64(t2, t0);
        tcg_gen_add_i64(t0, t1, t2);
        if (opc == OPC_ADD_CP2) {
            tcg_gen_ext32s_i64(t0, t0);
        tcg_gen_xor_i64(t1, t1, t2);
        tcg_gen_xor_i64(t2, t2, t0);
        tcg_gen_andc_i64(t1, t2, t1);
        tcg_temp_free_i64(t2);
        tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
        generate_exception(ctx, EXCP_OVERFLOW);
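        /*
         * Overflow check for the (D)ADD case: t1 ^ t2 has its sign bit
         * clear iff the operands had the same sign, and t2 ^ result has
         * it set iff the sign changed.  The andc therefore leaves the
         * sign bit set exactly when a signed overflow occurred, and the
         * TCG_COND_GE branch skips the exception otherwise.
         */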
        TCGv_i64 t2 = tcg_temp_new_i64();
        TCGLabel *lab = gen_new_label();

        tcg_gen_mov_i64(t2, t0);
        tcg_gen_sub_i64(t0, t1, t2);
        if (opc == OPC_SUB_CP2) {
            tcg_gen_ext32s_i64(t0, t0);
        tcg_gen_xor_i64(t1, t1, t2);
        tcg_gen_xor_i64(t2, t2, t0);
        tcg_gen_and_i64(t1, t1, t2);
        tcg_temp_free_i64(t2);
        tcg_gen_brcondi_i64(TCG_COND_GE, t1, 0, lab);
        generate_exception(ctx, EXCP_OVERFLOW);

        tcg_gen_ext32u_i64(t0, t0);
        tcg_gen_ext32u_i64(t1, t1);
        tcg_gen_mul_i64(t0, t0, t1);

        /* ??? Document is unclear: Set FCC[CC].  Does that mean the
           FD field is the CC field?  */
        MIPS_INVAL("loongson_cp2");
        generate_exception_end(ctx, EXCP_RI);

    gen_store_fpr64(ctx, t0, rd);

    tcg_temp_free_i64(t0);
    tcg_temp_free_i64(t1);
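/*
 * Traps.  gen_trap() below handles both the register-register (TEQ,
 * TGE, ...) and register-immediate (TEQI, TGEI, ...) forms; comparisons
 * whose outcome is already known at translation time are folded into
 * either an unconditional trap or a NOP.
 */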
static void gen_trap (DisasContext *ctx, uint32_t opc,
                      int rs, int rt, int16_t imm)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    /* Load needed operands */
        /* Compare two registers */
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
        /* Compare register to immediate */
        if (rs != 0 || imm != 0) {
            gen_load_gpr(t0, rs);
            tcg_gen_movi_tl(t1, (int32_t)imm);
        case OPC_TEQ:   /* rs == rs */
        case OPC_TEQI:  /* r0 == 0  */
        case OPC_TGE:   /* rs >= rs */
        case OPC_TGEI:  /* r0 >= 0  */
        case OPC_TGEU:  /* rs >= rs unsigned */
        case OPC_TGEIU: /* r0 >= 0  unsigned */
            generate_exception_end(ctx, EXCP_TRAP);
        case OPC_TLT:   /* rs < rs           */
        case OPC_TLTI:  /* r0 < 0            */
        case OPC_TLTU:  /* rs < rs unsigned  */
        case OPC_TLTIU: /* r0 < 0  unsigned  */
        case OPC_TNE:   /* rs != rs          */
        case OPC_TNEI:  /* r0 != 0           */
            /* Never trap: treat as NOP. */
        TCGLabel *l1 = gen_new_label();
            tcg_gen_brcond_tl(TCG_COND_NE, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_LT, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_LTU, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_GE, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_GEU, t0, t1, l1);
            tcg_gen_brcond_tl(TCG_COND_EQ, t0, t1, l1);
        generate_exception(ctx, EXCP_TRAP);
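/*
 * Direct block chaining: gen_goto_tb() links to the next translation
 * block in place only when use_goto_tb() says it is safe, i.e. we are
 * not single-stepping and (for system mode) the destination lies on the
 * same guest page as the current block.
 */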
static inline bool use_goto_tb(DisasContext *ctx, target_ulong dest)
    if (unlikely(ctx->singlestep_enabled)) {
#ifndef CONFIG_USER_ONLY
    return (ctx->tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK);

static inline void gen_goto_tb(DisasContext *ctx, int n, target_ulong dest)
    if (use_goto_tb(ctx, dest)) {
        tcg_gen_exit_tb((uintptr_t)ctx->tb + n);
        if (ctx->singlestep_enabled) {
            save_cpu_state(ctx, 0);
            gen_helper_raise_exception_debug(cpu_env);
/* Branches (before delay slot) */
static void gen_compute_branch (DisasContext *ctx, uint32_t opc,
                                int rs, int rt, int32_t offset,
    target_ulong btgt = -1;
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
                  TARGET_FMT_lx "\n", ctx->pc);
        generate_exception_end(ctx, EXCP_RI);

    /* Load needed operands */
        /* Compare two registers */
            gen_load_gpr(t0, rs);
            gen_load_gpr(t1, rt);
        btgt = ctx->pc + insn_bytes + offset;
        /* Compare to zero */
            gen_load_gpr(t0, rs);
        btgt = ctx->pc + insn_bytes + offset;
#if defined(TARGET_MIPS64)
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x7F);
        tcg_gen_andi_tl(t0, cpu_dspctrl, 0x3F);
        btgt = ctx->pc + insn_bytes + offset;
        /* Jump to immediate */
        btgt = ((ctx->pc + insn_bytes) & (int32_t)0xF0000000) | (uint32_t)offset;
        /* Jump to register */
        if (offset != 0 && offset != 16) {
            /* Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
               others are reserved. */
            MIPS_INVAL("jump hint");
            generate_exception_end(ctx, EXCP_RI);
        gen_load_gpr(btarget, rs);
        MIPS_INVAL("branch/jump");
        generate_exception_end(ctx, EXCP_RI);
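    /*
     * If bcond_compute is still zero here the branch condition is
     * statically known (it compares a register with itself or $zero
     * with zero), so it is resolved at translation time into an
     * always-taken branch, a never-taken branch (whose delay slot is
     * skipped for the "likely" variants), or just the link-register
     * update.
     */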
    if (bcond_compute == 0) {
        /* No condition to be computed */
        case OPC_BEQ:     /* rx == rx        */
        case OPC_BEQL:    /* rx == rx likely */
        case OPC_BGEZ:    /* 0 >= 0          */
        case OPC_BGEZL:   /* 0 >= 0 likely   */
        case OPC_BLEZ:    /* 0 <= 0          */
        case OPC_BLEZL:   /* 0 <= 0 likely   */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BGEZAL:  /* 0 >= 0          */
        case OPC_BGEZALL: /* 0 >= 0 likely   */
            /* Always take and link */
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BNE:     /* rx != rx        */
        case OPC_BGTZ:    /* 0 > 0           */
        case OPC_BLTZ:    /* 0 < 0           */
        case OPC_BLTZAL:  /* 0 < 0           */
            /* Handle as an unconditional branch to get correct delay
               slot checking.  */
            btgt = ctx->pc + insn_bytes + delayslot_size;
            ctx->hflags |= MIPS_HFLAG_B;
        case OPC_BLTZALL: /* 0 < 0 likely */
            tcg_gen_movi_tl(cpu_gpr[31], ctx->pc + 8);
            /* Skip the instruction in the delay slot */
        case OPC_BNEL:    /* rx != rx likely */
        case OPC_BGTZL:   /* 0 > 0 likely    */
        case OPC_BLTZL:   /* 0 < 0 likely    */
            /* Skip the instruction in the delay slot */
            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= MIPS_HFLAG_BX;
            ctx->hflags |= MIPS_HFLAG_B;
            ctx->hflags |= MIPS_HFLAG_BR;
            ctx->hflags |= MIPS_HFLAG_BR;
            MIPS_INVAL("branch/jump");
            generate_exception_end(ctx, EXCP_RI);
            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_EQ, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            tcg_gen_setcond_tl(TCG_COND_NE, bcond, t0, t1);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LE, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 32);
#if defined(TARGET_MIPS64)
            tcg_gen_setcondi_tl(TCG_COND_GE, bcond, t0, 64);
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            ctx->hflags |= MIPS_HFLAG_BC;
            tcg_gen_setcondi_tl(TCG_COND_LT, bcond, t0, 0);
            ctx->hflags |= MIPS_HFLAG_BL;
            MIPS_INVAL("conditional branch/jump");
            generate_exception_end(ctx, EXCP_RI);

    ctx->btarget = btgt;

    switch (delayslot_size) {
        ctx->hflags |= MIPS_HFLAG_BDS16;
        ctx->hflags |= MIPS_HFLAG_BDS32;

        int post_delay = insn_bytes + delayslot_size;
        int lowbit = !!(ctx->hflags & MIPS_HFLAG_M16);

        tcg_gen_movi_tl(cpu_gpr[blink], ctx->pc + post_delay + lowbit);

    if (insn_bytes == 2)
        ctx->hflags |= MIPS_HFLAG_B16;
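/*
 * Bitfield operations.  gen_bitops() below implements the EXT/INS
 * family (and their doubleword variants): an extract is a shift right
 * by lsb followed by a mask of msb+1 bits, an insert is a deposit of
 * msb-lsb+1 bits at position lsb into the existing rt value.
 */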
/* special3 bitfield operations */
static void gen_bitops (DisasContext *ctx, uint32_t opc, int rt,
                        int rs, int lsb, int msb)
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();

    gen_load_gpr(t1, rs);
        if (lsb + msb > 31) {
        tcg_gen_shri_tl(t0, t1, lsb);
        tcg_gen_andi_tl(t0, t0, (1U << (msb + 1)) - 1);
        tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
        if (lsb + msb > 63) {
        tcg_gen_shri_tl(t0, t1, lsb);
        tcg_gen_andi_tl(t0, t0, (1ULL << (msb + 1)) - 1);
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        tcg_gen_ext32s_tl(t0, t0);
#if defined(TARGET_MIPS64)
        gen_load_gpr(t0, rt);
        tcg_gen_deposit_tl(t0, t0, t1, lsb, msb - lsb + 1);
        MIPS_INVAL("bitops");
        generate_exception_end(ctx, EXCP_RI);
    gen_store_gpr(t0, rt);
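/*
 * gen_bshfl() covers the byte/halfword shuffles: WSBH swaps the bytes
 * within each halfword using the 0x00FF00FF masks below, SEB/SEH
 * sign-extend a byte or halfword, and on 64-bit targets the doubleword
 * forms do the corresponding byte and halfword reordering.
 */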
static void gen_bshfl (DisasContext *ctx, uint32_t op2, int rt, int rd)
        /* If no destination, treat it as a NOP. */
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        TCGv t1 = tcg_temp_new();

        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_andi_tl(t1, t1, 0x00FF00FF);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_andi_tl(t0, t0, ~0x00FF00FF);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
        tcg_gen_ext8s_tl(cpu_gpr[rd], t0);
        tcg_gen_ext16s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        TCGv t1 = tcg_temp_new();

        tcg_gen_shri_tl(t1, t0, 8);
        tcg_gen_andi_tl(t1, t1, 0x00FF00FF00FF00FFULL);
        tcg_gen_shli_tl(t0, t0, 8);
        tcg_gen_andi_tl(t0, t0, ~0x00FF00FF00FF00FFULL);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
        TCGv t1 = tcg_temp_new();

        tcg_gen_shri_tl(t1, t0, 16);
        tcg_gen_andi_tl(t1, t1, 0x0000FFFF0000FFFFULL);
        tcg_gen_shli_tl(t0, t0, 16);
        tcg_gen_andi_tl(t0, t0, ~0x0000FFFF0000FFFFULL);
        tcg_gen_or_tl(t0, t0, t1);
        tcg_gen_shri_tl(t1, t0, 32);
        tcg_gen_shli_tl(t0, t0, 32);
        tcg_gen_or_tl(cpu_gpr[rd], t0, t1);
        MIPS_INVAL("bsfhl");
        generate_exception_end(ctx, EXCP_RI);
static void gen_lsa(DisasContext *ctx, int opc, int rd, int rs, int rt,
    t0 = tcg_temp_new();
    t1 = tcg_temp_new();
    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);
    tcg_gen_shli_tl(t0, t0, imm2 + 1);
    tcg_gen_add_tl(cpu_gpr[rd], t0, t1);
    if (opc == OPC_LSA) {
        tcg_gen_ext32s_tl(cpu_gpr[rd], cpu_gpr[rd]);
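/*
 * LSA/DLSA compute rd = (rs << (imm2 + 1)) + rt, i.e. a scaled-index
 * address; the 32-bit LSA form additionally sign-extends the result.
 */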
static void gen_align(DisasContext *ctx, int opc, int rd, int rs, int rt,
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        tcg_gen_ext32s_tl(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        tcg_gen_mov_tl(cpu_gpr[rd], t0);
        TCGv t1 = tcg_temp_new();
        gen_load_gpr(t1, rs);
            TCGv_i64 t2 = tcg_temp_new_i64();
            tcg_gen_concat_tl_i64(t2, t1, t0);
            tcg_gen_shri_i64(t2, t2, 8 * (4 - bp));
            gen_move_low32(cpu_gpr[rd], t2);
            tcg_temp_free_i64(t2);
#if defined(TARGET_MIPS64)
            tcg_gen_shli_tl(t0, t0, 8 * bp);
            tcg_gen_shri_tl(t1, t1, 8 * (8 - bp));
            tcg_gen_or_tl(cpu_gpr[rd], t1, t0);
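/*
 * ALIGN/DALIGN form rd by concatenating rt:rs and extracting a
 * GPR-width value at byte offset bp, i.e.
 *   rd = (rt << 8*bp) | (rs >> 8*(width - bp));
 * a zero bp degenerates into a plain move of rt, as handled above.
 */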
static void gen_bitswap(DisasContext *ctx, int opc, int rd, int rt)
    t0 = tcg_temp_new();
    gen_load_gpr(t0, rt);
        gen_helper_bitswap(cpu_gpr[rd], t0);
#if defined(TARGET_MIPS64)
        gen_helper_dbitswap(cpu_gpr[rd], t0);
#ifndef CONFIG_USER_ONLY
/* CP0 (MMU and control) */
static inline void gen_mthc0_entrylo(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_deposit_i64(t1, t1, t0, 30, 32);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);

static inline void gen_mthc0_store64(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();
    TCGv_i64 t1 = tcg_temp_new_i64();

    tcg_gen_ext_tl_i64(t0, arg);
    tcg_gen_ld_i64(t1, cpu_env, off);
    tcg_gen_concat32_i64(t1, t1, t0);
    tcg_gen_st_i64(t1, cpu_env, off);
    tcg_temp_free_i64(t1);
    tcg_temp_free_i64(t0);

static inline void gen_mfhc0_entrylo(TCGv arg, target_ulong off)
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
#if defined(TARGET_MIPS64)
    tcg_gen_shri_i64(t0, t0, 30);
    tcg_gen_shri_i64(t0, t0, 32);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);

static inline void gen_mfhc0_load64(TCGv arg, target_ulong off, int shift)
    TCGv_i64 t0 = tcg_temp_new_i64();

    tcg_gen_ld_i64(t0, cpu_env, off);
    tcg_gen_shri_i64(t0, t0, 32 + shift);
    gen_move_low32(arg, t0);
    tcg_temp_free_i64(t0);

static inline void gen_mfc0_load32 (TCGv arg, target_ulong off)
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_ld_i32(t0, cpu_env, off);
    tcg_gen_ext_i32_tl(arg, t0);
    tcg_temp_free_i32(t0);

static inline void gen_mfc0_load64 (TCGv arg, target_ulong off)
    tcg_gen_ld_tl(arg, cpu_env, off);
    tcg_gen_ext32s_tl(arg, arg);

static inline void gen_mtc0_store32 (TCGv arg, target_ulong off)
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_trunc_tl_i32(t0, arg);
    tcg_gen_st_i32(t0, cpu_env, off);
    tcg_temp_free_i32(t0);
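/*
 * MFHC0/MTHC0 give a 32-bit guest access to the upper half of the wide
 * CP0 registers (EntryLo with extended PA, LLAddr, TagLo): the helpers
 * above load or store the full 64-bit field and shift or deposit at
 * bit 32 (bit 30 for EntryLo on 64-bit targets).
 */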
#define CP0_CHECK(c)                            \
    do {                                        \
        if (!(c)) {                             \
            goto cp0_unimplemented;             \
        }                                       \
    } while (0)
static void gen_mfhc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

    CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            goto cp0_unimplemented;
            gen_mfhc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            goto cp0_unimplemented;
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, lladdr),
                             ctx->CP0_LLAddr_shift);
            CP0_CHECK(ctx->mrp);
            gen_helper_mfhc0_maar(arg, cpu_env);
            goto cp0_unimplemented;
            gen_mfhc0_load64(arg, offsetof(CPUMIPSState, CP0_TagLo), 0);
            goto cp0_unimplemented;
        goto cp0_unimplemented;
    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn, reg, sel);

    LOG_DISAS("mfhc0 %s (reg %d sel %d)\n", rn, reg, sel);
    tcg_gen_movi_tl(arg, 0);
static void gen_mthc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";
    uint64_t mask = ctx->PAMask >> 36;

    CP0_CHECK(ctx->hflags & MIPS_HFLAG_ELPA);
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo0));
            goto cp0_unimplemented;
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_entrylo(arg, offsetof(CPUMIPSState, CP0_EntryLo1));
            goto cp0_unimplemented;
            /* LLAddr is read-only (the only exception is bit 0 if LLB is
               supported); the CP0_LLAddr_rw_bitmask does not seem to be
               relevant for modern MIPS cores supporting MTHC0, therefore
               treating MTHC0 to LLAddr as NOP. */
            CP0_CHECK(ctx->mrp);
            gen_helper_mthc0_maar(cpu_env, arg);
            goto cp0_unimplemented;
            tcg_gen_andi_tl(arg, arg, mask);
            gen_mthc0_store64(arg, offsetof(CPUMIPSState, CP0_TagLo));
            goto cp0_unimplemented;
        goto cp0_unimplemented;
    (void)rn; /* avoid a compiler warning */

    LOG_DISAS("mthc0 %s (reg %d sel %d)\n", rn, reg, sel);
static inline void gen_mfc0_unimplemented(DisasContext *ctx, TCGv arg)
    if (ctx->insn_flags & ISA_MIPS32R6) {
        tcg_gen_movi_tl(arg, 0);
        tcg_gen_movi_tl(arg, ~0);
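/*
 * On Release 6 cores a read of an unimplemented CP0 register yields 0,
 * while the pre-R6 behaviour modelled here is to return all ones;
 * gen_mfc0_unimplemented() picks between the two based on insn_flags.
 */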
static void gen_mfc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

        check_insn(ctx, ISA_MIPS32);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Index));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_mvpcontrol(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_mvpconf0(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_mvpconf1(arg, cpu_env);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPControl));
            goto cp0_unimplemented;
            CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
            gen_helper_mfc0_random(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEControl));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEConf0));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEConf1));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load64(arg, offsetof(CPUMIPSState, CP0_YQMask));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load64(arg, offsetof(CPUMIPSState, CP0_VPESchedule));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load64(arg, offsetof(CPUMIPSState, CP0_VPEScheFBack));
            rn = "VPEScheFBack";
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEOpt));
            goto cp0_unimplemented;
                TCGv_i64 tmp = tcg_temp_new_i64();
                tcg_gen_ld_i64(tmp, cpu_env,
                               offsetof(CPUMIPSState, CP0_EntryLo0));
#if defined(TARGET_MIPS64)
                /* Move RI/XI fields to bits 31:30 */
                tcg_gen_shri_tl(arg, tmp, CP0EnLo_XI);
                tcg_gen_deposit_tl(tmp, tmp, arg, 30, 2);
                gen_move_low32(arg, tmp);
                tcg_temp_free_i64(tmp);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tcstatus(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tcbind(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tcrestart(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tchalt(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tccontext(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tcschedule(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tcschefback(arg, cpu_env);
            goto cp0_unimplemented;
                TCGv_i64 tmp = tcg_temp_new_i64();
                tcg_gen_ld_i64(tmp, cpu_env,
                               offsetof(CPUMIPSState, CP0_EntryLo1));
#if defined(TARGET_MIPS64)
                /* Move RI/XI fields to bits 31:30 */
                tcg_gen_shri_tl(arg, tmp, CP0EnLo_XI);
                tcg_gen_deposit_tl(tmp, tmp, arg, 30, 2);
                gen_move_low32(arg, tmp);
                tcg_temp_free_i64(tmp);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_GlobalNumber));
            rn = "GlobalNumber";
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_Context));
            tcg_gen_ext32s_tl(arg, arg);
//            gen_helper_mfc0_contextconfig(arg); /* SmartMIPS ASE */
            rn = "ContextConfig";
            goto cp0_unimplemented;
            CP0_CHECK(ctx->ulri);
            tcg_gen_ld32s_tl(arg, cpu_env,
                             offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PageMask));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PageGrain));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Wired));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf0));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf1));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf2));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf3));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf4));
            goto cp0_unimplemented;
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_HWREna));
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));
            tcg_gen_ext32s_tl(arg, arg);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_BadInstr));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_BadInstrP));
            goto cp0_unimplemented;
            /* Mark as an IO operation because we read the time.  */
            if (ctx->tb->cflags & CF_USE_ICOUNT) {
            gen_helper_mfc0_count(arg, cpu_env);
            if (ctx->tb->cflags & CF_USE_ICOUNT) {
            /* Break the TB to be able to take timer interrupts immediately
               after reading count.  */
            ctx->bstate = BS_STOP;
        /* 6,7 are implementation dependent */
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EntryHi));
            tcg_gen_ext32s_tl(arg, arg);
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Compare));
        /* 6,7 are implementation dependent */
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Status));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_IntCtl));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSCtl));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSMap));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Cause));
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EPC));
            tcg_gen_ext32s_tl(arg, arg);
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PRid));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_EBase));
            check_insn(ctx, ISA_MIPS32R2);
            CP0_CHECK(ctx->cmgcr);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_CMGCRBase));
            tcg_gen_ext32s_tl(arg, arg);
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config0));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config1));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config2));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config3));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config4));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config5));
        /* 6,7 are implementation dependent */
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config6));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config7));
            goto cp0_unimplemented;
            gen_helper_mfc0_lladdr(arg, cpu_env);
            CP0_CHECK(ctx->mrp);
            gen_helper_mfc0_maar(arg, cpu_env);
            CP0_CHECK(ctx->mrp);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_MAARI));
            goto cp0_unimplemented;
            gen_helper_1e0i(mfc0_watchlo, arg, sel);
            goto cp0_unimplemented;
            gen_helper_1e0i(mfc0_watchhi, arg, sel);
            goto cp0_unimplemented;
#if defined(TARGET_MIPS64)
            check_insn(ctx, ISA_MIPS3);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_XContext));
            tcg_gen_ext32s_tl(arg, arg);
            goto cp0_unimplemented;
        /* Officially reserved, but sel 0 is used for R1x000 framemask */
        CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Framemask));
            goto cp0_unimplemented;
        tcg_gen_movi_tl(arg, 0); /* unimplemented */
        rn = "'Diagnostic"; /* implementation dependent */
            gen_helper_mfc0_debug(arg, cpu_env); /* EJTAG support */
//            gen_helper_mfc0_tracecontrol(arg); /* PDtrace support */
            rn = "TraceControl";
//            gen_helper_mfc0_tracecontrol2(arg); /* PDtrace support */
            rn = "TraceControl2";
//            gen_helper_mfc0_usertracedata(arg); /* PDtrace support */
            rn = "UserTraceData";
//            gen_helper_mfc0_tracebpc(arg); /* PDtrace support */
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_DEPC));
            tcg_gen_ext32s_tl(arg, arg);
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Performance0));
            rn = "Performance0";
//            gen_helper_mfc0_performance1(arg);
            rn = "Performance1";
//            gen_helper_mfc0_performance2(arg);
            rn = "Performance2";
//            gen_helper_mfc0_performance3(arg);
            rn = "Performance3";
//            gen_helper_mfc0_performance4(arg);
            rn = "Performance4";
//            gen_helper_mfc0_performance5(arg);
            rn = "Performance5";
//            gen_helper_mfc0_performance6(arg);
            rn = "Performance6";
//            gen_helper_mfc0_performance7(arg);
            rn = "Performance7";
            goto cp0_unimplemented;
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_ErrCtl));
        goto cp0_unimplemented;
        tcg_gen_movi_tl(arg, 0); /* unimplemented */
        goto cp0_unimplemented;
                TCGv_i64 tmp = tcg_temp_new_i64();
                tcg_gen_ld_i64(tmp, cpu_env, offsetof(CPUMIPSState, CP0_TagLo));
                gen_move_low32(arg, tmp);
                tcg_temp_free_i64(tmp);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DataLo));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_TagHi));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DataHi));
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_ErrorEPC));
            tcg_gen_ext32s_tl(arg, arg);
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DESAVE));
            CP0_CHECK(ctx->kscrexist & (1 << sel));
            tcg_gen_ld_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, CP0_KScratch[sel-2]));
            tcg_gen_ext32s_tl(arg, arg);
            goto cp0_unimplemented;
        goto cp0_unimplemented;
    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn, reg, sel);

    LOG_DISAS("mfc0 %s (reg %d sel %d)\n", rn, reg, sel);
    gen_mfc0_unimplemented(ctx, arg);
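/*
 * gen_mtc0() is the write-side mirror of gen_mfc0().  Many of its cases
 * either call a helper (when the write has side effects such as
 * recomputing interrupt state) or force ctx->bstate to BS_STOP/BS_EXCP
 * so that translation stops once hflags or the execution mode may have
 * changed.
 */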
static void gen_mtc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

    check_insn(ctx, ISA_MIPS32);

    if (ctx->tb->cflags & CF_USE_ICOUNT) {
            gen_helper_mtc0_index(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_mvpcontrol(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            goto cp0_unimplemented;
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_vpecontrol(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_vpeconf0(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_vpeconf1(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_yqmask(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            tcg_gen_st_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, CP0_VPESchedule));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            tcg_gen_st_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, CP0_VPEScheFBack));
            rn = "VPEScheFBack";
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_vpeopt(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_entrylo0(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcstatus(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcbind(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcrestart(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tchalt(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tccontext(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcschedule(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcschefback(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_entrylo1(cpu_env, arg);
            rn = "GlobalNumber";
            goto cp0_unimplemented;
            gen_helper_mtc0_context(cpu_env, arg);
//            gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
            rn = "ContextConfig";
            goto cp0_unimplemented;
            CP0_CHECK(ctx->ulri);
            tcg_gen_st_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            goto cp0_unimplemented;
            gen_helper_mtc0_pagemask(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_pagegrain(cpu_env, arg);
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
            gen_helper_mtc0_wired(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf0(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf1(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf2(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf3(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf4(cpu_env, arg);
            goto cp0_unimplemented;
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_hwrena(cpu_env, arg);
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
            goto cp0_unimplemented;
            gen_helper_mtc0_count(cpu_env, arg);
        /* 6,7 are implementation dependent */
            goto cp0_unimplemented;
            gen_helper_mtc0_entryhi(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_compare(cpu_env, arg);
        /* 6,7 are implementation dependent */
            goto cp0_unimplemented;
            save_cpu_state(ctx, 1);
            gen_helper_mtc0_status(cpu_env, arg);
            /* BS_STOP isn't good enough here, hflags may have changed. */
            gen_save_pc(ctx->pc + 4);
            ctx->bstate = BS_EXCP;
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_intctl(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsctl(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            check_insn(ctx, ISA_MIPS32R2);
            gen_mtc0_store32(arg, offsetof(CPUMIPSState, CP0_SRSMap));
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
            save_cpu_state(ctx, 1);
            gen_helper_mtc0_cause(cpu_env, arg);
            goto cp0_unimplemented;
            tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EPC));
            goto cp0_unimplemented;
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_ebase(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_config0(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            /* ignored, read only */
            gen_helper_mtc0_config2(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            gen_helper_mtc0_config3(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            gen_helper_mtc0_config4(cpu_env, arg);
            ctx->bstate = BS_STOP;
            gen_helper_mtc0_config5(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
        /* 6,7 are implementation dependent */
            rn = "Invalid config selector";
            goto cp0_unimplemented;
            gen_helper_mtc0_lladdr(cpu_env, arg);
            CP0_CHECK(ctx->mrp);
            gen_helper_mtc0_maar(cpu_env, arg);
            CP0_CHECK(ctx->mrp);
            gen_helper_mtc0_maari(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_0e1i(mtc0_watchlo, arg, sel);
            goto cp0_unimplemented;
            gen_helper_0e1i(mtc0_watchhi, arg, sel);
            goto cp0_unimplemented;
#if defined(TARGET_MIPS64)
            check_insn(ctx, ISA_MIPS3);
            gen_helper_mtc0_xcontext(cpu_env, arg);
            goto cp0_unimplemented;
        /* Officially reserved, but sel 0 is used for R1x000 framemask */
        CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
            gen_helper_mtc0_framemask(cpu_env, arg);
            goto cp0_unimplemented;
        rn = "Diagnostic"; /* implementation dependent */
            gen_helper_mtc0_debug(cpu_env, arg); /* EJTAG support */
            /* BS_STOP isn't good enough here, hflags may have changed. */
            gen_save_pc(ctx->pc + 4);
            ctx->bstate = BS_EXCP;
//            gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
            rn = "TraceControl";
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
//            gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
            rn = "TraceControl2";
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
//            gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
            rn = "UserTraceData";
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
//            gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
            tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_DEPC));
            goto cp0_unimplemented;
            gen_helper_mtc0_performance0(cpu_env, arg);
            rn = "Performance0";
//            gen_helper_mtc0_performance1(arg);
            rn = "Performance1";
//            gen_helper_mtc0_performance2(arg);
            rn = "Performance2";
//            gen_helper_mtc0_performance3(arg);
            rn = "Performance3";
//            gen_helper_mtc0_performance4(arg);
            rn = "Performance4";
//            gen_helper_mtc0_performance5(arg);
            rn = "Performance5";
//            gen_helper_mtc0_performance6(arg);
            rn = "Performance6";
//            gen_helper_mtc0_performance7(arg);
            rn = "Performance7";
            goto cp0_unimplemented;
        gen_helper_mtc0_errctl(cpu_env, arg);
        ctx->bstate = BS_STOP;
        goto cp0_unimplemented;
        goto cp0_unimplemented;
            gen_helper_mtc0_taglo(cpu_env, arg);
            gen_helper_mtc0_datalo(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_taghi(cpu_env, arg);
            gen_helper_mtc0_datahi(cpu_env, arg);
            goto cp0_unimplemented;
            tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_ErrorEPC));
            goto cp0_unimplemented;
            gen_mtc0_store32(arg, offsetof(CPUMIPSState, CP0_DESAVE));
            CP0_CHECK(ctx->kscrexist & (1 << sel));
            tcg_gen_st_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, CP0_KScratch[sel-2]));
            goto cp0_unimplemented;
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        goto cp0_unimplemented;
    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn, reg, sel);
    /* For simplicity assume that all writes can cause interrupts.  */
    if (ctx->tb->cflags & CF_USE_ICOUNT) {
        ctx->bstate = BS_STOP;

    LOG_DISAS("mtc0 %s (reg %d sel %d)\n", rn, reg, sel);
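/*
 * The TARGET_MIPS64 dmfc0/dmtc0 variants below mirror gen_mfc0() and
 * gen_mtc0() but keep the full 64-bit register width, so the 32-bit
 * sign-extension steps of the MFC0 paths are dropped and a few
 * d-prefixed helpers are used where the full width matters.
 */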
#if defined(TARGET_MIPS64)
static void gen_dmfc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

    check_insn(ctx, ISA_MIPS64);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Index));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_mvpcontrol(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_mvpconf0(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_mvpconf1(arg, cpu_env);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPControl));
            goto cp0_unimplemented;
            CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
            gen_helper_mfc0_random(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEControl));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEConf0));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEConf1));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_YQMask));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_VPESchedule));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_VPEScheFBack));
            rn = "VPEScheFBack";
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_VPEOpt));
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EntryLo0));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tcstatus(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mfc0_tcbind(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_dmfc0_tcrestart(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_dmfc0_tchalt(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_dmfc0_tccontext(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_dmfc0_tcschedule(arg, cpu_env);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_dmfc0_tcschefback(arg, cpu_env);
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EntryLo1));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_GlobalNumber));
            rn = "GlobalNumber";
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_Context));
//            gen_helper_dmfc0_contextconfig(arg); /* SmartMIPS ASE */
            rn = "ContextConfig";
            goto cp0_unimplemented;
            CP0_CHECK(ctx->ulri);
            tcg_gen_ld_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PageMask));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PageGrain));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Wired));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf0));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf1));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf2));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf3));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSConf4));
            goto cp0_unimplemented;
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_HWREna));
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_BadVAddr));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_BadInstr));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_BadInstrP));
            goto cp0_unimplemented;
            /* Mark as an IO operation because we read the time.  */
            if (ctx->tb->cflags & CF_USE_ICOUNT) {
            gen_helper_mfc0_count(arg, cpu_env);
            if (ctx->tb->cflags & CF_USE_ICOUNT) {
            /* Break the TB to be able to take timer interrupts immediately
               after reading count.  */
            ctx->bstate = BS_STOP;
        /* 6,7 are implementation dependent */
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EntryHi));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Compare));
        /* 6,7 are implementation dependent */
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Status));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_IntCtl));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSCtl));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_SRSMap));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Cause));
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EPC));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_PRid));
            check_insn(ctx, ISA_MIPS32R2);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_EBase));
            check_insn(ctx, ISA_MIPS32R2);
            CP0_CHECK(ctx->cmgcr);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_CMGCRBase));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config0));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config1));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config2));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config3));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config4));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config5));
        /* 6,7 are implementation dependent */
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config6));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Config7));
            goto cp0_unimplemented;
            gen_helper_dmfc0_lladdr(arg, cpu_env);
            CP0_CHECK(ctx->mrp);
            gen_helper_dmfc0_maar(arg, cpu_env);
            CP0_CHECK(ctx->mrp);
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_MAARI));
            goto cp0_unimplemented;
            gen_helper_1e0i(dmfc0_watchlo, arg, sel);
            goto cp0_unimplemented;
            gen_helper_1e0i(mfc0_watchhi, arg, sel);
            goto cp0_unimplemented;
            check_insn(ctx, ISA_MIPS3);
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_XContext));
            goto cp0_unimplemented;
        /* Officially reserved, but sel 0 is used for R1x000 framemask */
        CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Framemask));
            goto cp0_unimplemented;
        tcg_gen_movi_tl(arg, 0); /* unimplemented */
        rn = "'Diagnostic"; /* implementation dependent */
            gen_helper_mfc0_debug(arg, cpu_env); /* EJTAG support */
//            gen_helper_dmfc0_tracecontrol(arg, cpu_env); /* PDtrace support */
            rn = "TraceControl";
//            gen_helper_dmfc0_tracecontrol2(arg, cpu_env); /* PDtrace support */
            rn = "TraceControl2";
//            gen_helper_dmfc0_usertracedata(arg, cpu_env); /* PDtrace support */
            rn = "UserTraceData";
//            gen_helper_dmfc0_tracebpc(arg, cpu_env); /* PDtrace support */
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_DEPC));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_Performance0));
            rn = "Performance0";
//            gen_helper_dmfc0_performance1(arg);
            rn = "Performance1";
//            gen_helper_dmfc0_performance2(arg);
            rn = "Performance2";
//            gen_helper_dmfc0_performance3(arg);
            rn = "Performance3";
//            gen_helper_dmfc0_performance4(arg);
            rn = "Performance4";
//            gen_helper_dmfc0_performance5(arg);
            rn = "Performance5";
//            gen_helper_dmfc0_performance6(arg);
            rn = "Performance6";
//            gen_helper_dmfc0_performance7(arg);
            rn = "Performance7";
            goto cp0_unimplemented;
        gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_ErrCtl));
        goto cp0_unimplemented;
        tcg_gen_movi_tl(arg, 0); /* unimplemented */
        goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_TagLo));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DataLo));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_TagHi));
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DataHi));
            goto cp0_unimplemented;
            tcg_gen_ld_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_ErrorEPC));
            goto cp0_unimplemented;
            gen_mfc0_load32(arg, offsetof(CPUMIPSState, CP0_DESAVE));
            CP0_CHECK(ctx->kscrexist & (1 << sel));
            tcg_gen_ld_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, CP0_KScratch[sel-2]));
            goto cp0_unimplemented;
        goto cp0_unimplemented;
    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn, reg, sel);

    LOG_DISAS("dmfc0 %s (reg %d sel %d)\n", rn, reg, sel);
    gen_mfc0_unimplemented(ctx, arg);
static void gen_dmtc0(DisasContext *ctx, TCGv arg, int reg, int sel)
    const char *rn = "invalid";

    check_insn(ctx, ISA_MIPS64);

    if (ctx->tb->cflags & CF_USE_ICOUNT) {
            gen_helper_mtc0_index(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_mvpcontrol(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            goto cp0_unimplemented;
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_vpecontrol(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_vpeconf0(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_vpeconf1(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_yqmask(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_VPESchedule));
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_VPEScheFBack));
            rn = "VPEScheFBack";
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_vpeopt(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_dmtc0_entrylo0(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcstatus(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcbind(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcrestart(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tchalt(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tccontext(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcschedule(cpu_env, arg);
            CP0_CHECK(ctx->insn_flags & ASE_MT);
            gen_helper_mtc0_tcschefback(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_dmtc0_entrylo1(cpu_env, arg);
            rn = "GlobalNumber";
            goto cp0_unimplemented;
            gen_helper_mtc0_context(cpu_env, arg);
//            gen_helper_mtc0_contextconfig(cpu_env, arg); /* SmartMIPS ASE */
            rn = "ContextConfig";
            goto cp0_unimplemented;
            CP0_CHECK(ctx->ulri);
            tcg_gen_st_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, active_tc.CP0_UserLocal));
            goto cp0_unimplemented;
            gen_helper_mtc0_pagemask(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_pagegrain(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_wired(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf0(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf1(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf2(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf3(cpu_env, arg);
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsconf4(cpu_env, arg);
            goto cp0_unimplemented;
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_hwrena(cpu_env, arg);
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
            goto cp0_unimplemented;
            gen_helper_mtc0_count(cpu_env, arg);
        /* 6,7 are implementation dependent */
            goto cp0_unimplemented;
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
            gen_helper_mtc0_entryhi(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_compare(cpu_env, arg);
        /* 6,7 are implementation dependent */
            goto cp0_unimplemented;
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
            save_cpu_state(ctx, 1);
            gen_helper_mtc0_status(cpu_env, arg);
            /* BS_STOP isn't good enough here, hflags may have changed. */
            gen_save_pc(ctx->pc + 4);
            ctx->bstate = BS_EXCP;
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_intctl(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_srsctl(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            check_insn(ctx, ISA_MIPS32R2);
            gen_mtc0_store32(arg, offsetof(CPUMIPSState, CP0_SRSMap));
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
            save_cpu_state(ctx, 1);
            /* Mark as an IO operation because we may trigger a software
               interrupt.  */
            if (ctx->tb->cflags & CF_USE_ICOUNT) {
            gen_helper_mtc0_cause(cpu_env, arg);
            if (ctx->tb->cflags & CF_USE_ICOUNT) {
            /* Stop translation as we may have triggered an interrupt */
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
            tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_EPC));
            goto cp0_unimplemented;
            check_insn(ctx, ISA_MIPS32R2);
            gen_helper_mtc0_ebase(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_config0(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            /* ignored, read only */
            gen_helper_mtc0_config2(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            gen_helper_mtc0_config3(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            /* currently ignored */
            gen_helper_mtc0_config5(cpu_env, arg);
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
        /* 6,7 are implementation dependent */
            rn = "Invalid config selector";
            goto cp0_unimplemented;
            gen_helper_mtc0_lladdr(cpu_env, arg);
            CP0_CHECK(ctx->mrp);
            gen_helper_mtc0_maar(cpu_env, arg);
            CP0_CHECK(ctx->mrp);
            gen_helper_mtc0_maari(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_0e1i(mtc0_watchlo, arg, sel);
            goto cp0_unimplemented;
            gen_helper_0e1i(mtc0_watchhi, arg, sel);
            goto cp0_unimplemented;
            check_insn(ctx, ISA_MIPS3);
            gen_helper_mtc0_xcontext(cpu_env, arg);
            goto cp0_unimplemented;
        /* Officially reserved, but sel 0 is used for R1x000 framemask */
        CP0_CHECK(!(ctx->insn_flags & ISA_MIPS32R6));
            gen_helper_mtc0_framemask(cpu_env, arg);
            goto cp0_unimplemented;
        rn = "Diagnostic"; /* implementation dependent */
            gen_helper_mtc0_debug(cpu_env, arg); /* EJTAG support */
            /* BS_STOP isn't good enough here, hflags may have changed. */
            gen_save_pc(ctx->pc + 4);
            ctx->bstate = BS_EXCP;
//            gen_helper_mtc0_tracecontrol(cpu_env, arg); /* PDtrace support */
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            rn = "TraceControl";
//            gen_helper_mtc0_tracecontrol2(cpu_env, arg); /* PDtrace support */
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            rn = "TraceControl2";
//            gen_helper_mtc0_usertracedata(cpu_env, arg); /* PDtrace support */
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            rn = "UserTraceData";
//            gen_helper_mtc0_tracebpc(cpu_env, arg); /* PDtrace support */
            /* Stop translation as we may have switched the execution mode */
            ctx->bstate = BS_STOP;
            goto cp0_unimplemented;
            tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_DEPC));
            goto cp0_unimplemented;
            gen_helper_mtc0_performance0(cpu_env, arg);
            rn = "Performance0";
//            gen_helper_mtc0_performance1(cpu_env, arg);
            rn = "Performance1";
//            gen_helper_mtc0_performance2(cpu_env, arg);
            rn = "Performance2";
//            gen_helper_mtc0_performance3(cpu_env, arg);
            rn = "Performance3";
//            gen_helper_mtc0_performance4(cpu_env, arg);
            rn = "Performance4";
//            gen_helper_mtc0_performance5(cpu_env, arg);
            rn = "Performance5";
//            gen_helper_mtc0_performance6(cpu_env, arg);
            rn = "Performance6";
//            gen_helper_mtc0_performance7(cpu_env, arg);
            rn = "Performance7";
            goto cp0_unimplemented;
        gen_helper_mtc0_errctl(cpu_env, arg);
        ctx->bstate = BS_STOP;
        goto cp0_unimplemented;
        goto cp0_unimplemented;
            gen_helper_mtc0_taglo(cpu_env, arg);
            gen_helper_mtc0_datalo(cpu_env, arg);
            goto cp0_unimplemented;
            gen_helper_mtc0_taghi(cpu_env, arg);
            gen_helper_mtc0_datahi(cpu_env, arg);
            goto cp0_unimplemented;
            tcg_gen_st_tl(arg, cpu_env, offsetof(CPUMIPSState, CP0_ErrorEPC));
            goto cp0_unimplemented;
            gen_mtc0_store32(arg, offsetof(CPUMIPSState, CP0_DESAVE));
            CP0_CHECK(ctx->kscrexist & (1 << sel));
            tcg_gen_st_tl(arg, cpu_env,
                          offsetof(CPUMIPSState, CP0_KScratch[sel-2]));
            goto cp0_unimplemented;
        /* Stop translation as we may have switched the execution mode */
        ctx->bstate = BS_STOP;
        goto cp0_unimplemented;
    (void)rn; /* avoid a compiler warning */
    LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn, reg, sel);
    /* For simplicity assume that all writes can cause interrupts.  */
    if (ctx->tb->cflags & CF_USE_ICOUNT) {
        ctx->bstate = BS_STOP;

    LOG_DISAS("dmtc0 %s (reg %d sel %d)\n", rn, reg, sel);

#endif /* TARGET_MIPS64 */
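/*
 * MT ASE cross-TC moves.  gen_mftr()/gen_mttr() move a register of
 * another thread context (selected by VPEControl.TargTC) to or from a
 * GPR of the current TC.  When the target TC belongs to another VPE or
 * is out of range, a read yields all ones and a write is silently
 * dropped, as the guard comparisons at the top of each function show.
 */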
7593 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
7594 int u
, int sel
, int h
)
7596 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
7597 TCGv t0
= tcg_temp_local_new();
7599 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
7600 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
7601 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
7602 tcg_gen_movi_tl(t0
, -1);
7603 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
7604 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
7605 tcg_gen_movi_tl(t0
, -1);
7611 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
7614 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
7624 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
7627 gen_helper_mftc0_tcbind(t0
, cpu_env
);
7630 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
7633 gen_helper_mftc0_tchalt(t0
, cpu_env
);
7636 gen_helper_mftc0_tccontext(t0
, cpu_env
);
7639 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
7642 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
7645 gen_mfc0(ctx
, t0
, rt
, sel
);
7652 gen_helper_mftc0_entryhi(t0
, cpu_env
);
7655 gen_mfc0(ctx
, t0
, rt
, sel
);
7661 gen_helper_mftc0_status(t0
, cpu_env
);
7664 gen_mfc0(ctx
, t0
, rt
, sel
);
7670 gen_helper_mftc0_cause(t0
, cpu_env
);
7680 gen_helper_mftc0_epc(t0
, cpu_env
);
7690 gen_helper_mftc0_ebase(t0
, cpu_env
);
7700 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
7710 gen_helper_mftc0_debug(t0
, cpu_env
);
7713 gen_mfc0(ctx
, t0
, rt
, sel
);
7718 gen_mfc0(ctx
, t0
, rt
, sel
);
7720 } else switch (sel
) {
7721 /* GPR registers. */
7723 gen_helper_1e0i(mftgpr
, t0
, rt
);
7725 /* Auxiliary CPU registers */
7729 gen_helper_1e0i(mftlo
, t0
, 0);
7732 gen_helper_1e0i(mfthi
, t0
, 0);
7735 gen_helper_1e0i(mftacx
, t0
, 0);
7738 gen_helper_1e0i(mftlo
, t0
, 1);
7741 gen_helper_1e0i(mfthi
, t0
, 1);
7744 gen_helper_1e0i(mftacx
, t0
, 1);
7747 gen_helper_1e0i(mftlo
, t0
, 2);
7750 gen_helper_1e0i(mfthi
, t0
, 2);
7753 gen_helper_1e0i(mftacx
, t0
, 2);
7756 gen_helper_1e0i(mftlo
, t0
, 3);
7759 gen_helper_1e0i(mfthi
, t0
, 3);
7762 gen_helper_1e0i(mftacx
, t0
, 3);
7765 gen_helper_mftdsp(t0
, cpu_env
);
7771 /* Floating point (COP1). */
7773 /* XXX: For now we support only a single FPU context. */
7775 TCGv_i32 fp0
= tcg_temp_new_i32();
7777 gen_load_fpr32(ctx
, fp0
, rt
);
7778 tcg_gen_ext_i32_tl(t0
, fp0
);
7779 tcg_temp_free_i32(fp0
);
7781 TCGv_i32 fp0
= tcg_temp_new_i32();
7783 gen_load_fpr32h(ctx
, fp0
, rt
);
7784 tcg_gen_ext_i32_tl(t0
, fp0
);
7785 tcg_temp_free_i32(fp0
);
7789 /* XXX: For now we support only a single FPU context. */
7790 gen_helper_1e0i(cfc1
, t0
, rt
);
7792 /* COP2: Not implemented. */
7799 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7800 gen_store_gpr(t0
, rd
);
7806 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
7807 generate_exception_end(ctx
, EXCP_RI
);
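/*
 * Illustrative sketch (not part of the original file): the two guard
 * conditions at the top of gen_mftr (and gen_mttr below) boil down to
 * "is the targeted TC reachable from here?".  gen_mftr answers an
 * unreachable target by returning -1 rather than trapping.  The
 * predicate below is hypothetical and only restates those two checks.
 */
static inline bool mftr_target_tc_invalid_sketch(CPUMIPSState *env)
{
    int other_tc = env->CP0_VPEControl & (0xff << CP0VPECo_TargTC);

    /* Without MVP, only TCs bound to the same VPE may be accessed. */
    if ((env->CP0_VPEConf0 & (1 << CP0VPEC0_MVP)) == 0 &&
        ((env->tcs[other_tc].CP0_TCBind & (0xf << CP0TCBd_CurVPE)) !=
         (env->active_tc.CP0_TCBind & (0xf << CP0TCBd_CurVPE)))) {
        return true;
    }
    /* A TargTC beyond the number of provisioned TCs is also invalid. */
    if ((env->CP0_VPEControl & (0xff << CP0VPECo_TargTC)) >
        (env->mvp->CP0_MVPConf0 & (0xff << CP0MVPC0_PTC))) {
        return true;
    }
    return false;
}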
7810 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
7811 int u
, int sel
, int h
)
7813 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
7814 TCGv t0
= tcg_temp_local_new();
7816 gen_load_gpr(t0
, rt
);
7817 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
7818 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
7819 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
))))
7821 else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
7822 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
)))
7829 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
7832 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
7842 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
7845 gen_helper_mttc0_tcbind(cpu_env
, t0
);
7848 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
7851 gen_helper_mttc0_tchalt(cpu_env
, t0
);
7854 gen_helper_mttc0_tccontext(cpu_env
, t0
);
7857 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
7860 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
7863 gen_mtc0(ctx
, t0
, rd
, sel
);
7870 gen_helper_mttc0_entryhi(cpu_env
, t0
);
7873 gen_mtc0(ctx
, t0
, rd
, sel
);
7879 gen_helper_mttc0_status(cpu_env
, t0
);
7882 gen_mtc0(ctx
, t0
, rd
, sel
);
7888 gen_helper_mttc0_cause(cpu_env
, t0
);
7898 gen_helper_mttc0_ebase(cpu_env
, t0
);
7908 gen_helper_mttc0_debug(cpu_env
, t0
);
7911 gen_mtc0(ctx
, t0
, rd
, sel
);
7916 gen_mtc0(ctx
, t0
, rd
, sel
);
7918 } else switch (sel
) {
7919 /* GPR registers. */
7921 gen_helper_0e1i(mttgpr
, t0
, rd
);
7923 /* Auxiliary CPU registers */
7927 gen_helper_0e1i(mttlo
, t0
, 0);
7930 gen_helper_0e1i(mtthi
, t0
, 0);
7933 gen_helper_0e1i(mttacx
, t0
, 0);
7936 gen_helper_0e1i(mttlo
, t0
, 1);
7939 gen_helper_0e1i(mtthi
, t0
, 1);
7942 gen_helper_0e1i(mttacx
, t0
, 1);
7945 gen_helper_0e1i(mttlo
, t0
, 2);
7948 gen_helper_0e1i(mtthi
, t0
, 2);
7951 gen_helper_0e1i(mttacx
, t0
, 2);
7954 gen_helper_0e1i(mttlo
, t0
, 3);
7957 gen_helper_0e1i(mtthi
, t0
, 3);
7960 gen_helper_0e1i(mttacx
, t0
, 3);
7963 gen_helper_mttdsp(cpu_env
, t0
);
7969 /* Floating point (COP1). */
7971 /* XXX: For now we support only a single FPU context. */
7973 TCGv_i32 fp0
= tcg_temp_new_i32();
7975 tcg_gen_trunc_tl_i32(fp0
, t0
);
7976 gen_store_fpr32(ctx
, fp0
, rd
);
7977 tcg_temp_free_i32(fp0
);
7979 TCGv_i32 fp0
= tcg_temp_new_i32();
7981 tcg_gen_trunc_tl_i32(fp0
, t0
);
7982 gen_store_fpr32h(ctx
, fp0
, rd
);
7983 tcg_temp_free_i32(fp0
);
7987 /* XXX: For now we support only a single FPU context. */
7989 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
7991 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
7992 tcg_temp_free_i32(fs_tmp
);
7994 /* Stop translation as we may have changed hflags */
7995 ctx
->bstate
= BS_STOP
;
7997 /* COP2: Not implemented. */
8004 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
8010 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
8011 generate_exception_end(ctx
, EXCP_RI
);
8014 static void gen_cp0 (CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
, int rt
, int rd
)
8016 const char *opn
= "ldst";
8018 check_cp0_enabled(ctx
);
8025 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8030 TCGv t0
= tcg_temp_new();
8032 gen_load_gpr(t0
, rt
);
8033 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8038 #if defined(TARGET_MIPS64)
8040 check_insn(ctx
, ISA_MIPS3
);
8045 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8049 check_insn(ctx
, ISA_MIPS3
);
8051 TCGv t0
= tcg_temp_new();
8053 gen_load_gpr(t0
, rt
);
8054 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8066 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
8072 TCGv t0
= tcg_temp_new();
8073 gen_load_gpr(t0
, rt
);
8074 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
8080 check_insn(ctx
, ASE_MT
);
8085 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
8086 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
8090 check_insn(ctx
, ASE_MT
);
8091 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
8092 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
8097 if (!env
->tlb
->helper_tlbwi
)
8099 gen_helper_tlbwi(cpu_env
);
8104 if (!env
->tlb
->helper_tlbinv
) {
8107 gen_helper_tlbinv(cpu_env
);
8108 } /* treat as nop if TLBINV not supported */
8113 if (!env
->tlb
->helper_tlbinvf
) {
8116 gen_helper_tlbinvf(cpu_env
);
8117 } /* treat as nop if TLBINV not supported */
8121 if (!env
->tlb
->helper_tlbwr
)
8123 gen_helper_tlbwr(cpu_env
);
8127 if (!env
->tlb
->helper_tlbp
)
8129 gen_helper_tlbp(cpu_env
);
8133 if (!env
->tlb
->helper_tlbr
)
8135 gen_helper_tlbr(cpu_env
);
8137 case OPC_ERET
: /* OPC_ERETNC */
8138 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8139 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8142 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
8143 if (ctx
->opcode
& (1 << bit_shift
)) {
8146 check_insn(ctx
, ISA_MIPS32R5
);
8147 gen_helper_eretnc(cpu_env
);
8151 check_insn(ctx
, ISA_MIPS2
);
8152 gen_helper_eret(cpu_env
);
8154 ctx
->bstate
= BS_EXCP
;
8159 check_insn(ctx
, ISA_MIPS32
);
8160 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8161 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8164 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
8166 generate_exception_end(ctx
, EXCP_RI
);
8168 gen_helper_deret(cpu_env
);
8169 ctx
->bstate
= BS_EXCP
;
8174 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS32
);
8175 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
8176 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8179 /* If we get an exception, we want to restart at next instruction */
8181 save_cpu_state(ctx
, 1);
8183 gen_helper_wait(cpu_env
);
8184 ctx
->bstate
= BS_EXCP
;
8189 generate_exception_end(ctx
, EXCP_RI
);
8192 (void)opn
; /* avoid a compiler warning */
8194 #endif /* !CONFIG_USER_ONLY */
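/*
 * Illustrative sketch (not part of the original file): how the CP0 move
 * cases in gen_cp0 above pick their operands out of the instruction
 * word.  For MFC0/MTC0-class encodings, rt selects the GPR, rd selects
 * the CP0 register, and the low three bits of the opcode carry the
 * "sel" field, which is why every call above passes "ctx->opcode & 0x7".
 * The helper below is local to this sketch.
 */
static inline void decode_cp0_move_fields_sketch(uint32_t opcode,
                                                 int *rt, int *rd, int *sel)
{
    *rt  = (opcode >> 16) & 0x1f;   /* general purpose register */
    *rd  = (opcode >> 11) & 0x1f;   /* CP0 register number */
    *sel = opcode & 0x7;            /* CP0 register select */
}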
8196 /* CP1 Branches (before delay slot) */
8197 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
8198 int32_t cc
, int32_t offset
)
8200 target_ulong btarget
;
8201 TCGv_i32 t0
= tcg_temp_new_i32();
8203 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
8204 generate_exception_end(ctx
, EXCP_RI
);
8209 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
8211 btarget
= ctx
->pc
+ 4 + offset
;
8215 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8216 tcg_gen_not_i32(t0
, t0
);
8217 tcg_gen_andi_i32(t0
, t0
, 1);
8218 tcg_gen_extu_i32_tl(bcond
, t0
);
8221 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8222 tcg_gen_not_i32(t0
, t0
);
8223 tcg_gen_andi_i32(t0
, t0
, 1);
8224 tcg_gen_extu_i32_tl(bcond
, t0
);
8227 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8228 tcg_gen_andi_i32(t0
, t0
, 1);
8229 tcg_gen_extu_i32_tl(bcond
, t0
);
8232 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8233 tcg_gen_andi_i32(t0
, t0
, 1);
8234 tcg_gen_extu_i32_tl(bcond
, t0
);
8236 ctx
->hflags
|= MIPS_HFLAG_BL
;
8240 TCGv_i32 t1
= tcg_temp_new_i32();
8241 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8242 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8243 tcg_gen_nand_i32(t0
, t0
, t1
);
8244 tcg_temp_free_i32(t1
);
8245 tcg_gen_andi_i32(t0
, t0
, 1);
8246 tcg_gen_extu_i32_tl(bcond
, t0
);
8251 TCGv_i32 t1
= tcg_temp_new_i32();
8252 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8253 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8254 tcg_gen_or_i32(t0
, t0
, t1
);
8255 tcg_temp_free_i32(t1
);
8256 tcg_gen_andi_i32(t0
, t0
, 1);
8257 tcg_gen_extu_i32_tl(bcond
, t0
);
8262 TCGv_i32 t1
= tcg_temp_new_i32();
8263 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8264 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8265 tcg_gen_and_i32(t0
, t0
, t1
);
8266 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8267 tcg_gen_and_i32(t0
, t0
, t1
);
8268 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8269 tcg_gen_nand_i32(t0
, t0
, t1
);
8270 tcg_temp_free_i32(t1
);
8271 tcg_gen_andi_i32(t0
, t0
, 1);
8272 tcg_gen_extu_i32_tl(bcond
, t0
);
8277 TCGv_i32 t1
= tcg_temp_new_i32();
8278 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
8279 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+1));
8280 tcg_gen_or_i32(t0
, t0
, t1
);
8281 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+2));
8282 tcg_gen_or_i32(t0
, t0
, t1
);
8283 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+3));
8284 tcg_gen_or_i32(t0
, t0
, t1
);
8285 tcg_temp_free_i32(t1
);
8286 tcg_gen_andi_i32(t0
, t0
, 1);
8287 tcg_gen_extu_i32_tl(bcond
, t0
);
8290 ctx
->hflags
|= MIPS_HFLAG_BC
;
8293 MIPS_INVAL("cp1 cond branch");
8294 generate_exception_end(ctx
, EXCP_RI
);
8297 ctx
->btarget
= btarget
;
8298 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
8300 tcg_temp_free_i32(t0
);
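/*
 * Illustrative sketch (not part of the original file): the step shared
 * by all the BC1F/BC1T/BC1FL/BC1TL cases in gen_compute_branch1 above.
 * The FP condition code "cc" lives at bit get_fp_bit(cc) of FCR31; the
 * branch condition is simply that bit (inverted for the "false"
 * variants), widened into the global "bcond" that the delay-slot
 * machinery tests later.  The helper name is hypothetical.
 */
static inline void gen_fp_cc_to_bcond_sketch(int cc, bool branch_if_false)
{
    TCGv_i32 t0 = tcg_temp_new_i32();

    tcg_gen_shri_i32(t0, fpu_fcr31, get_fp_bit(cc)); /* move CC to bit 0 */
    if (branch_if_false) {
        tcg_gen_not_i32(t0, t0);                     /* BC1F / BC1FL */
    }
    tcg_gen_andi_i32(t0, t0, 1);                     /* keep only bit 0 */
    tcg_gen_extu_i32_tl(bcond, t0);                  /* publish to bcond */
    tcg_temp_free_i32(t0);
}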
/* R6 CP1 Branches */
static void gen_compute_branch1_r6(DisasContext *ctx, uint32_t op,
                                   int32_t ft, int32_t offset,
                                   int delayslot_size)
{
    target_ulong btarget;
    TCGv_i64 t0 = tcg_temp_new_i64();

    if (ctx->hflags & MIPS_HFLAG_BMASK) {
#ifdef MIPS_DEBUG_DISAS
        LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
                  "\n", ctx->pc);
#endif
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    gen_load_fpr64(ctx, t0, ft);
    tcg_gen_andi_i64(t0, t0, 1);

    btarget = addr_add(ctx, ctx->pc + 4, offset);

    switch (op) {
    case OPC_BC1EQZ:
        tcg_gen_xori_i64(t0, t0, 1);
        ctx->hflags |= MIPS_HFLAG_BC;
        break;
    case OPC_BC1NEZ:
        /* t0 already set */
        ctx->hflags |= MIPS_HFLAG_BC;
        break;
    default:
        MIPS_INVAL("cp1 cond branch");
        generate_exception_end(ctx, EXCP_RI);
        goto out;
    }

    tcg_gen_trunc_i64_tl(bcond, t0);

    ctx->btarget = btarget;

    switch (delayslot_size) {
    case 2:
        ctx->hflags |= MIPS_HFLAG_BDS16;
        break;
    case 4:
        ctx->hflags |= MIPS_HFLAG_BDS32;
        break;
    }

out:
    tcg_temp_free_i64(t0);
}
/* Coprocessor 1 (FPU) */

#define FOP(func, fmt) (((fmt) << 21) | (func))
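/*
 * Illustrative sketch (not part of the original file): FOP() packs the
 * 5-bit format field (shifted to bits 25..21) together with the 6-bit
 * function field (bits 5..0).  The enumerators built from it below are
 * therefore comparable against the corresponding bits of ctx->opcode,
 * and gen_farith separately extracts "ctx->opcode & 0x3f" as the raw
 * function number.  The two helpers here are hypothetical and only
 * invert that packing.
 */
static inline uint32_t fop_fmt_field_sketch(uint32_t fop)
{
    return (fop >> 21) & 0x1f;      /* FMT_S, FMT_D, FMT_W, ... */
}

static inline uint32_t fop_func_field_sketch(uint32_t fop)
{
    return fop & 0x3f;              /* add = 0, sub = 1, mul = 2, ... */
}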
8362 OPC_ADD_S
= FOP(0, FMT_S
),
8363 OPC_SUB_S
= FOP(1, FMT_S
),
8364 OPC_MUL_S
= FOP(2, FMT_S
),
8365 OPC_DIV_S
= FOP(3, FMT_S
),
8366 OPC_SQRT_S
= FOP(4, FMT_S
),
8367 OPC_ABS_S
= FOP(5, FMT_S
),
8368 OPC_MOV_S
= FOP(6, FMT_S
),
8369 OPC_NEG_S
= FOP(7, FMT_S
),
8370 OPC_ROUND_L_S
= FOP(8, FMT_S
),
8371 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
8372 OPC_CEIL_L_S
= FOP(10, FMT_S
),
8373 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
8374 OPC_ROUND_W_S
= FOP(12, FMT_S
),
8375 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
8376 OPC_CEIL_W_S
= FOP(14, FMT_S
),
8377 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
8378 OPC_SEL_S
= FOP(16, FMT_S
),
8379 OPC_MOVCF_S
= FOP(17, FMT_S
),
8380 OPC_MOVZ_S
= FOP(18, FMT_S
),
8381 OPC_MOVN_S
= FOP(19, FMT_S
),
8382 OPC_SELEQZ_S
= FOP(20, FMT_S
),
8383 OPC_RECIP_S
= FOP(21, FMT_S
),
8384 OPC_RSQRT_S
= FOP(22, FMT_S
),
8385 OPC_SELNEZ_S
= FOP(23, FMT_S
),
8386 OPC_MADDF_S
= FOP(24, FMT_S
),
8387 OPC_MSUBF_S
= FOP(25, FMT_S
),
8388 OPC_RINT_S
= FOP(26, FMT_S
),
8389 OPC_CLASS_S
= FOP(27, FMT_S
),
8390 OPC_MIN_S
= FOP(28, FMT_S
),
8391 OPC_RECIP2_S
= FOP(28, FMT_S
),
8392 OPC_MINA_S
= FOP(29, FMT_S
),
8393 OPC_RECIP1_S
= FOP(29, FMT_S
),
8394 OPC_MAX_S
= FOP(30, FMT_S
),
8395 OPC_RSQRT1_S
= FOP(30, FMT_S
),
8396 OPC_MAXA_S
= FOP(31, FMT_S
),
8397 OPC_RSQRT2_S
= FOP(31, FMT_S
),
8398 OPC_CVT_D_S
= FOP(33, FMT_S
),
8399 OPC_CVT_W_S
= FOP(36, FMT_S
),
8400 OPC_CVT_L_S
= FOP(37, FMT_S
),
8401 OPC_CVT_PS_S
= FOP(38, FMT_S
),
8402 OPC_CMP_F_S
= FOP (48, FMT_S
),
8403 OPC_CMP_UN_S
= FOP (49, FMT_S
),
8404 OPC_CMP_EQ_S
= FOP (50, FMT_S
),
8405 OPC_CMP_UEQ_S
= FOP (51, FMT_S
),
8406 OPC_CMP_OLT_S
= FOP (52, FMT_S
),
8407 OPC_CMP_ULT_S
= FOP (53, FMT_S
),
8408 OPC_CMP_OLE_S
= FOP (54, FMT_S
),
8409 OPC_CMP_ULE_S
= FOP (55, FMT_S
),
8410 OPC_CMP_SF_S
= FOP (56, FMT_S
),
8411 OPC_CMP_NGLE_S
= FOP (57, FMT_S
),
8412 OPC_CMP_SEQ_S
= FOP (58, FMT_S
),
8413 OPC_CMP_NGL_S
= FOP (59, FMT_S
),
8414 OPC_CMP_LT_S
= FOP (60, FMT_S
),
8415 OPC_CMP_NGE_S
= FOP (61, FMT_S
),
8416 OPC_CMP_LE_S
= FOP (62, FMT_S
),
8417 OPC_CMP_NGT_S
= FOP (63, FMT_S
),
8419 OPC_ADD_D
= FOP(0, FMT_D
),
8420 OPC_SUB_D
= FOP(1, FMT_D
),
8421 OPC_MUL_D
= FOP(2, FMT_D
),
8422 OPC_DIV_D
= FOP(3, FMT_D
),
8423 OPC_SQRT_D
= FOP(4, FMT_D
),
8424 OPC_ABS_D
= FOP(5, FMT_D
),
8425 OPC_MOV_D
= FOP(6, FMT_D
),
8426 OPC_NEG_D
= FOP(7, FMT_D
),
8427 OPC_ROUND_L_D
= FOP(8, FMT_D
),
8428 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
8429 OPC_CEIL_L_D
= FOP(10, FMT_D
),
8430 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
8431 OPC_ROUND_W_D
= FOP(12, FMT_D
),
8432 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
8433 OPC_CEIL_W_D
= FOP(14, FMT_D
),
8434 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
8435 OPC_SEL_D
= FOP(16, FMT_D
),
8436 OPC_MOVCF_D
= FOP(17, FMT_D
),
8437 OPC_MOVZ_D
= FOP(18, FMT_D
),
8438 OPC_MOVN_D
= FOP(19, FMT_D
),
8439 OPC_SELEQZ_D
= FOP(20, FMT_D
),
8440 OPC_RECIP_D
= FOP(21, FMT_D
),
8441 OPC_RSQRT_D
= FOP(22, FMT_D
),
8442 OPC_SELNEZ_D
= FOP(23, FMT_D
),
8443 OPC_MADDF_D
= FOP(24, FMT_D
),
8444 OPC_MSUBF_D
= FOP(25, FMT_D
),
8445 OPC_RINT_D
= FOP(26, FMT_D
),
8446 OPC_CLASS_D
= FOP(27, FMT_D
),
8447 OPC_MIN_D
= FOP(28, FMT_D
),
8448 OPC_RECIP2_D
= FOP(28, FMT_D
),
8449 OPC_MINA_D
= FOP(29, FMT_D
),
8450 OPC_RECIP1_D
= FOP(29, FMT_D
),
8451 OPC_MAX_D
= FOP(30, FMT_D
),
8452 OPC_RSQRT1_D
= FOP(30, FMT_D
),
8453 OPC_MAXA_D
= FOP(31, FMT_D
),
8454 OPC_RSQRT2_D
= FOP(31, FMT_D
),
8455 OPC_CVT_S_D
= FOP(32, FMT_D
),
8456 OPC_CVT_W_D
= FOP(36, FMT_D
),
8457 OPC_CVT_L_D
= FOP(37, FMT_D
),
8458 OPC_CMP_F_D
= FOP (48, FMT_D
),
8459 OPC_CMP_UN_D
= FOP (49, FMT_D
),
8460 OPC_CMP_EQ_D
= FOP (50, FMT_D
),
8461 OPC_CMP_UEQ_D
= FOP (51, FMT_D
),
8462 OPC_CMP_OLT_D
= FOP (52, FMT_D
),
8463 OPC_CMP_ULT_D
= FOP (53, FMT_D
),
8464 OPC_CMP_OLE_D
= FOP (54, FMT_D
),
8465 OPC_CMP_ULE_D
= FOP (55, FMT_D
),
8466 OPC_CMP_SF_D
= FOP (56, FMT_D
),
8467 OPC_CMP_NGLE_D
= FOP (57, FMT_D
),
8468 OPC_CMP_SEQ_D
= FOP (58, FMT_D
),
8469 OPC_CMP_NGL_D
= FOP (59, FMT_D
),
8470 OPC_CMP_LT_D
= FOP (60, FMT_D
),
8471 OPC_CMP_NGE_D
= FOP (61, FMT_D
),
8472 OPC_CMP_LE_D
= FOP (62, FMT_D
),
8473 OPC_CMP_NGT_D
= FOP (63, FMT_D
),
8475 OPC_CVT_S_W
= FOP(32, FMT_W
),
8476 OPC_CVT_D_W
= FOP(33, FMT_W
),
8477 OPC_CVT_S_L
= FOP(32, FMT_L
),
8478 OPC_CVT_D_L
= FOP(33, FMT_L
),
8479 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
8481 OPC_ADD_PS
= FOP(0, FMT_PS
),
8482 OPC_SUB_PS
= FOP(1, FMT_PS
),
8483 OPC_MUL_PS
= FOP(2, FMT_PS
),
8484 OPC_DIV_PS
= FOP(3, FMT_PS
),
8485 OPC_ABS_PS
= FOP(5, FMT_PS
),
8486 OPC_MOV_PS
= FOP(6, FMT_PS
),
8487 OPC_NEG_PS
= FOP(7, FMT_PS
),
8488 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
8489 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
8490 OPC_MOVN_PS
= FOP(19, FMT_PS
),
8491 OPC_ADDR_PS
= FOP(24, FMT_PS
),
8492 OPC_MULR_PS
= FOP(26, FMT_PS
),
8493 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
8494 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
8495 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
8496 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
8498 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
8499 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
8500 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
8501 OPC_PLL_PS
= FOP(44, FMT_PS
),
8502 OPC_PLU_PS
= FOP(45, FMT_PS
),
8503 OPC_PUL_PS
= FOP(46, FMT_PS
),
8504 OPC_PUU_PS
= FOP(47, FMT_PS
),
8505 OPC_CMP_F_PS
= FOP (48, FMT_PS
),
8506 OPC_CMP_UN_PS
= FOP (49, FMT_PS
),
8507 OPC_CMP_EQ_PS
= FOP (50, FMT_PS
),
8508 OPC_CMP_UEQ_PS
= FOP (51, FMT_PS
),
8509 OPC_CMP_OLT_PS
= FOP (52, FMT_PS
),
8510 OPC_CMP_ULT_PS
= FOP (53, FMT_PS
),
8511 OPC_CMP_OLE_PS
= FOP (54, FMT_PS
),
8512 OPC_CMP_ULE_PS
= FOP (55, FMT_PS
),
8513 OPC_CMP_SF_PS
= FOP (56, FMT_PS
),
8514 OPC_CMP_NGLE_PS
= FOP (57, FMT_PS
),
8515 OPC_CMP_SEQ_PS
= FOP (58, FMT_PS
),
8516 OPC_CMP_NGL_PS
= FOP (59, FMT_PS
),
8517 OPC_CMP_LT_PS
= FOP (60, FMT_PS
),
8518 OPC_CMP_NGE_PS
= FOP (61, FMT_PS
),
8519 OPC_CMP_LE_PS
= FOP (62, FMT_PS
),
8520 OPC_CMP_NGT_PS
= FOP (63, FMT_PS
),
8524 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
8525 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
8526 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
8527 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
8528 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
8529 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
8530 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
8531 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
8532 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
8533 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
8534 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
8535 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
8536 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
8537 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
8538 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
8539 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
8540 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
8541 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
8542 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
8543 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
8544 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
8545 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
8547 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
8548 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
8549 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
8550 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
8551 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
8552 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
8553 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
8554 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
8555 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
8556 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
8557 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
8558 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
8559 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
8560 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
8561 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
8562 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
8563 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
8564 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
8565 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
8566 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
8567 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
8568 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
8570 static void gen_cp1 (DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
8572 TCGv t0
= tcg_temp_new();
8577 TCGv_i32 fp0
= tcg_temp_new_i32();
8579 gen_load_fpr32(ctx
, fp0
, fs
);
8580 tcg_gen_ext_i32_tl(t0
, fp0
);
8581 tcg_temp_free_i32(fp0
);
8583 gen_store_gpr(t0
, rt
);
8586 gen_load_gpr(t0
, rt
);
8588 TCGv_i32 fp0
= tcg_temp_new_i32();
8590 tcg_gen_trunc_tl_i32(fp0
, t0
);
8591 gen_store_fpr32(ctx
, fp0
, fs
);
8592 tcg_temp_free_i32(fp0
);
8596 gen_helper_1e0i(cfc1
, t0
, fs
);
8597 gen_store_gpr(t0
, rt
);
8600 gen_load_gpr(t0
, rt
);
8601 save_cpu_state(ctx
, 0);
8603 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
8605 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
8606 tcg_temp_free_i32(fs_tmp
);
8608 /* Stop translation as we may have changed hflags */
8609 ctx
->bstate
= BS_STOP
;
8611 #if defined(TARGET_MIPS64)
8613 gen_load_fpr64(ctx
, t0
, fs
);
8614 gen_store_gpr(t0
, rt
);
8617 gen_load_gpr(t0
, rt
);
8618 gen_store_fpr64(ctx
, t0
, fs
);
8623 TCGv_i32 fp0
= tcg_temp_new_i32();
8625 gen_load_fpr32h(ctx
, fp0
, fs
);
8626 tcg_gen_ext_i32_tl(t0
, fp0
);
8627 tcg_temp_free_i32(fp0
);
8629 gen_store_gpr(t0
, rt
);
8632 gen_load_gpr(t0
, rt
);
8634 TCGv_i32 fp0
= tcg_temp_new_i32();
8636 tcg_gen_trunc_tl_i32(fp0
, t0
);
8637 gen_store_fpr32h(ctx
, fp0
, fs
);
8638 tcg_temp_free_i32(fp0
);
8642 MIPS_INVAL("cp1 move");
8643 generate_exception_end(ctx
, EXCP_RI
);
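/*
 * Illustrative sketch (not part of the original file): the MFC1/MTC1
 * data movement used in gen_cp1 above.  A 32-bit FPR value is
 * sign-extended into the (possibly 64-bit) GPR on the way out and
 * truncated back to 32 bits on the way in.  The helper names below are
 * hypothetical; they isolate just that conversion.
 */
static inline void gen_mfc1_sketch(DisasContext *ctx, int rt, int fs)
{
    TCGv t0 = tcg_temp_new();
    TCGv_i32 fp0 = tcg_temp_new_i32();

    gen_load_fpr32(ctx, fp0, fs);     /* read the single-precision FPR */
    tcg_gen_ext_i32_tl(t0, fp0);      /* sign-extend to GPR width */
    tcg_temp_free_i32(fp0);
    gen_store_gpr(t0, rt);            /* store is a nop for rt == 0 */
    tcg_temp_free(t0);
}

static inline void gen_mtc1_sketch(DisasContext *ctx, int rt, int fs)
{
    TCGv t0 = tcg_temp_new();
    TCGv_i32 fp0 = tcg_temp_new_i32();

    gen_load_gpr(t0, rt);             /* rt == 0 reads a constant zero */
    tcg_gen_trunc_tl_i32(fp0, t0);    /* keep only the low 32 bits */
    gen_store_fpr32(ctx, fp0, fs);    /* write the single-precision FPR */
    tcg_temp_free_i32(fp0);
    tcg_temp_free(t0);
}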
static void gen_movci (DisasContext *ctx, int rd, int rs, int cc, int tf)
{
    TCGLabel *l1;
    TCGCond cond;
    TCGv_i32 t0;

    if (rd == 0) {
        /* Treat as NOP. */
        return;
    }

    if (tf) {
        cond = TCG_COND_EQ;
    } else {
        cond = TCG_COND_NE;
    }

    l1 = gen_new_label();
    t0 = tcg_temp_new_i32();
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    if (rs == 0) {
        tcg_gen_movi_tl(cpu_gpr[rd], 0);
    } else {
        tcg_gen_mov_tl(cpu_gpr[rd], cpu_gpr[rs]);
    }
    gen_set_label(l1);
}

static inline void gen_movcf_s(DisasContext *ctx, int fs, int fd, int cc,
                               int tf)
{
    TCGCond cond;
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();

    if (tf) {
        cond = TCG_COND_EQ;
    } else {
        cond = TCG_COND_NE;
    }

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    gen_set_label(l1);
    tcg_temp_free_i32(t0);
}

static inline void gen_movcf_d (DisasContext *ctx, int fs, int fd, int cc, int tf)
{
    TCGCond cond;
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i64 fp0;
    TCGLabel *l1 = gen_new_label();

    if (tf) {
        cond = TCG_COND_EQ;
    } else {
        cond = TCG_COND_NE;
    }

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    tcg_temp_free_i32(t0);
    fp0 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fs);
    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp0);
    gen_set_label(l1);
}

static inline void gen_movcf_ps(DisasContext *ctx, int fs, int fd,
                                int cc, int tf)
{
    TCGCond cond;
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGLabel *l1 = gen_new_label();
    TCGLabel *l2 = gen_new_label();

    if (tf) {
        cond = TCG_COND_EQ;
    } else {
        cond = TCG_COND_NE;
    }

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    tcg_gen_brcondi_i32(cond, t0, 0, l1);
    gen_load_fpr32(ctx, t0, fs);
    gen_store_fpr32(ctx, t0, fd);
    gen_set_label(l1);

    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc+1));
    tcg_gen_brcondi_i32(cond, t0, 0, l2);
    gen_load_fpr32h(ctx, t0, fs);
    gen_store_fpr32h(ctx, t0, fd);
    tcg_temp_free_i32(t0);
    gen_set_label(l2);
}
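/*
 * Illustrative sketch (not part of the original file): the pre-R6
 * MOVF.fmt/MOVT.fmt helpers above implement the conditional move with a
 * conditional branch around the copy.  The same effect could be had
 * branchlessly with a movcond, as the R6 SEL/SELEQZ/SELNEZ code below
 * does; this hypothetical variant of gen_movcf_s shows the contrast.
 */
static inline void gen_movcf_s_movcond_sketch(DisasContext *ctx, int fs,
                                              int fd, int cc, int tf)
{
    TCGv_i32 zero = tcg_const_i32(0);
    TCGv_i32 t0 = tcg_temp_new_i32();
    TCGv_i32 src = tcg_temp_new_i32();
    TCGv_i32 dst = tcg_temp_new_i32();

    /* Isolate the selected FP condition-code bit from FCR31. */
    tcg_gen_andi_i32(t0, fpu_fcr31, 1 << get_fp_bit(cc));
    gen_load_fpr32(ctx, src, fs);
    gen_load_fpr32(ctx, dst, fd);
    /* MOVT copies when the bit is set, MOVF when it is clear. */
    tcg_gen_movcond_i32(tf ? TCG_COND_NE : TCG_COND_EQ,
                        dst, t0, zero, src, dst);
    gen_store_fpr32(ctx, dst, fd);
    tcg_temp_free_i32(dst);
    tcg_temp_free_i32(src);
    tcg_temp_free_i32(t0);
    tcg_temp_free_i32(zero);
}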
static void gen_sel_s(DisasContext *ctx, enum fopcode op1, int fd, int ft,
                      int fs)
{
    TCGv_i32 t1 = tcg_const_i32(0);
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();
    TCGv_i32 fp2 = tcg_temp_new_i32();
    gen_load_fpr32(ctx, fp0, fd);
    gen_load_fpr32(ctx, fp1, ft);
    gen_load_fpr32(ctx, fp2, fs);

    switch (op1) {
    case OPC_SEL_S:
        tcg_gen_andi_i32(fp0, fp0, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        break;
    case OPC_SELEQZ_S:
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        break;
    case OPC_SELNEZ_S:
        tcg_gen_andi_i32(fp1, fp1, 1);
        tcg_gen_movcond_i32(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        break;
    default:
        MIPS_INVAL("gen_sel_s");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    gen_store_fpr32(ctx, fp0, fd);
    tcg_temp_free_i32(fp2);
    tcg_temp_free_i32(fp1);
    tcg_temp_free_i32(fp0);
    tcg_temp_free_i32(t1);
}

static void gen_sel_d(DisasContext *ctx, enum fopcode op1, int fd, int ft,
                      int fs)
{
    TCGv_i64 t1 = tcg_const_i64(0);
    TCGv_i64 fp0 = tcg_temp_new_i64();
    TCGv_i64 fp1 = tcg_temp_new_i64();
    TCGv_i64 fp2 = tcg_temp_new_i64();
    gen_load_fpr64(ctx, fp0, fd);
    gen_load_fpr64(ctx, fp1, ft);
    gen_load_fpr64(ctx, fp2, fs);

    switch (op1) {
    case OPC_SEL_D:
        tcg_gen_andi_i64(fp0, fp0, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp0, t1, fp1, fp2);
        break;
    case OPC_SELEQZ_D:
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_EQ, fp0, fp1, t1, fp2, t1);
        break;
    case OPC_SELNEZ_D:
        tcg_gen_andi_i64(fp1, fp1, 1);
        tcg_gen_movcond_i64(TCG_COND_NE, fp0, fp1, t1, fp2, t1);
        break;
    default:
        MIPS_INVAL("gen_sel_d");
        generate_exception_end(ctx, EXCP_RI);
        break;
    }

    gen_store_fpr64(ctx, fp0, fd);
    tcg_temp_free_i64(fp2);
    tcg_temp_free_i64(fp1);
    tcg_temp_free_i64(fp0);
    tcg_temp_free_i64(t1);
}
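/*
 * Illustrative sketch (not part of the original file): a plain C model
 * of what the three R6 select operations above compute on 32-bit FPR
 * bit patterns, with bit 0 of the relevant operand acting as the
 * condition.  It only documents the movcond choices; the function is
 * hypothetical and not used by the translator.
 */
static inline uint32_t r6_fp_select_model_sketch(uint32_t op_kind,
                                                 uint32_t fd_bits,
                                                 uint32_t ft_bits,
                                                 uint32_t fs_bits)
{
    switch (op_kind) {
    case 0:  /* SEL:    fd.bit0 chooses between ft and fs */
        return (fd_bits & 1) ? ft_bits : fs_bits;
    case 1:  /* SELEQZ: fs if ft.bit0 == 0, else all-zero  */
        return (ft_bits & 1) ? 0 : fs_bits;
    default: /* SELNEZ: fs if ft.bit0 != 0, else all-zero  */
        return (ft_bits & 1) ? fs_bits : 0;
    }
}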
8823 static void gen_farith (DisasContext
*ctx
, enum fopcode op1
,
8824 int ft
, int fs
, int fd
, int cc
)
8826 uint32_t func
= ctx
->opcode
& 0x3f;
8830 TCGv_i32 fp0
= tcg_temp_new_i32();
8831 TCGv_i32 fp1
= tcg_temp_new_i32();
8833 gen_load_fpr32(ctx
, fp0
, fs
);
8834 gen_load_fpr32(ctx
, fp1
, ft
);
8835 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
8836 tcg_temp_free_i32(fp1
);
8837 gen_store_fpr32(ctx
, fp0
, fd
);
8838 tcg_temp_free_i32(fp0
);
8843 TCGv_i32 fp0
= tcg_temp_new_i32();
8844 TCGv_i32 fp1
= tcg_temp_new_i32();
8846 gen_load_fpr32(ctx
, fp0
, fs
);
8847 gen_load_fpr32(ctx
, fp1
, ft
);
8848 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
8849 tcg_temp_free_i32(fp1
);
8850 gen_store_fpr32(ctx
, fp0
, fd
);
8851 tcg_temp_free_i32(fp0
);
8856 TCGv_i32 fp0
= tcg_temp_new_i32();
8857 TCGv_i32 fp1
= tcg_temp_new_i32();
8859 gen_load_fpr32(ctx
, fp0
, fs
);
8860 gen_load_fpr32(ctx
, fp1
, ft
);
8861 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
8862 tcg_temp_free_i32(fp1
);
8863 gen_store_fpr32(ctx
, fp0
, fd
);
8864 tcg_temp_free_i32(fp0
);
8869 TCGv_i32 fp0
= tcg_temp_new_i32();
8870 TCGv_i32 fp1
= tcg_temp_new_i32();
8872 gen_load_fpr32(ctx
, fp0
, fs
);
8873 gen_load_fpr32(ctx
, fp1
, ft
);
8874 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
8875 tcg_temp_free_i32(fp1
);
8876 gen_store_fpr32(ctx
, fp0
, fd
);
8877 tcg_temp_free_i32(fp0
);
8882 TCGv_i32 fp0
= tcg_temp_new_i32();
8884 gen_load_fpr32(ctx
, fp0
, fs
);
8885 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
8886 gen_store_fpr32(ctx
, fp0
, fd
);
8887 tcg_temp_free_i32(fp0
);
8892 TCGv_i32 fp0
= tcg_temp_new_i32();
8894 gen_load_fpr32(ctx
, fp0
, fs
);
8896 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
8898 gen_helper_float_abs_s(fp0
, fp0
);
8900 gen_store_fpr32(ctx
, fp0
, fd
);
8901 tcg_temp_free_i32(fp0
);
8906 TCGv_i32 fp0
= tcg_temp_new_i32();
8908 gen_load_fpr32(ctx
, fp0
, fs
);
8909 gen_store_fpr32(ctx
, fp0
, fd
);
8910 tcg_temp_free_i32(fp0
);
8915 TCGv_i32 fp0
= tcg_temp_new_i32();
8917 gen_load_fpr32(ctx
, fp0
, fs
);
8919 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
8921 gen_helper_float_chs_s(fp0
, fp0
);
8923 gen_store_fpr32(ctx
, fp0
, fd
);
8924 tcg_temp_free_i32(fp0
);
8928 check_cp1_64bitmode(ctx
);
8930 TCGv_i32 fp32
= tcg_temp_new_i32();
8931 TCGv_i64 fp64
= tcg_temp_new_i64();
8933 gen_load_fpr32(ctx
, fp32
, fs
);
8935 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
8937 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
8939 tcg_temp_free_i32(fp32
);
8940 gen_store_fpr64(ctx
, fp64
, fd
);
8941 tcg_temp_free_i64(fp64
);
8945 check_cp1_64bitmode(ctx
);
8947 TCGv_i32 fp32
= tcg_temp_new_i32();
8948 TCGv_i64 fp64
= tcg_temp_new_i64();
8950 gen_load_fpr32(ctx
, fp32
, fs
);
8952 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
8954 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
8956 tcg_temp_free_i32(fp32
);
8957 gen_store_fpr64(ctx
, fp64
, fd
);
8958 tcg_temp_free_i64(fp64
);
8962 check_cp1_64bitmode(ctx
);
8964 TCGv_i32 fp32
= tcg_temp_new_i32();
8965 TCGv_i64 fp64
= tcg_temp_new_i64();
8967 gen_load_fpr32(ctx
, fp32
, fs
);
8969 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
8971 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
8973 tcg_temp_free_i32(fp32
);
8974 gen_store_fpr64(ctx
, fp64
, fd
);
8975 tcg_temp_free_i64(fp64
);
8979 check_cp1_64bitmode(ctx
);
8981 TCGv_i32 fp32
= tcg_temp_new_i32();
8982 TCGv_i64 fp64
= tcg_temp_new_i64();
8984 gen_load_fpr32(ctx
, fp32
, fs
);
8986 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
8988 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
8990 tcg_temp_free_i32(fp32
);
8991 gen_store_fpr64(ctx
, fp64
, fd
);
8992 tcg_temp_free_i64(fp64
);
8997 TCGv_i32 fp0
= tcg_temp_new_i32();
8999 gen_load_fpr32(ctx
, fp0
, fs
);
9001 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
9003 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
9005 gen_store_fpr32(ctx
, fp0
, fd
);
9006 tcg_temp_free_i32(fp0
);
9011 TCGv_i32 fp0
= tcg_temp_new_i32();
9013 gen_load_fpr32(ctx
, fp0
, fs
);
9015 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
9017 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
9019 gen_store_fpr32(ctx
, fp0
, fd
);
9020 tcg_temp_free_i32(fp0
);
9025 TCGv_i32 fp0
= tcg_temp_new_i32();
9027 gen_load_fpr32(ctx
, fp0
, fs
);
9029 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
9031 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
9033 gen_store_fpr32(ctx
, fp0
, fd
);
9034 tcg_temp_free_i32(fp0
);
9039 TCGv_i32 fp0
= tcg_temp_new_i32();
9041 gen_load_fpr32(ctx
, fp0
, fs
);
9043 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
9045 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
9047 gen_store_fpr32(ctx
, fp0
, fd
);
9048 tcg_temp_free_i32(fp0
);
9052 check_insn(ctx
, ISA_MIPS32R6
);
9053 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9056 check_insn(ctx
, ISA_MIPS32R6
);
9057 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9060 check_insn(ctx
, ISA_MIPS32R6
);
9061 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
9064 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9065 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9068 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9070 TCGLabel
*l1
= gen_new_label();
9074 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9076 fp0
= tcg_temp_new_i32();
9077 gen_load_fpr32(ctx
, fp0
, fs
);
9078 gen_store_fpr32(ctx
, fp0
, fd
);
9079 tcg_temp_free_i32(fp0
);
9084 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9086 TCGLabel
*l1
= gen_new_label();
9090 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9091 fp0
= tcg_temp_new_i32();
9092 gen_load_fpr32(ctx
, fp0
, fs
);
9093 gen_store_fpr32(ctx
, fp0
, fd
);
9094 tcg_temp_free_i32(fp0
);
9101 TCGv_i32 fp0
= tcg_temp_new_i32();
9103 gen_load_fpr32(ctx
, fp0
, fs
);
9104 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
9105 gen_store_fpr32(ctx
, fp0
, fd
);
9106 tcg_temp_free_i32(fp0
);
9111 TCGv_i32 fp0
= tcg_temp_new_i32();
9113 gen_load_fpr32(ctx
, fp0
, fs
);
9114 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
9115 gen_store_fpr32(ctx
, fp0
, fd
);
9116 tcg_temp_free_i32(fp0
);
9120 check_insn(ctx
, ISA_MIPS32R6
);
9122 TCGv_i32 fp0
= tcg_temp_new_i32();
9123 TCGv_i32 fp1
= tcg_temp_new_i32();
9124 TCGv_i32 fp2
= tcg_temp_new_i32();
9125 gen_load_fpr32(ctx
, fp0
, fs
);
9126 gen_load_fpr32(ctx
, fp1
, ft
);
9127 gen_load_fpr32(ctx
, fp2
, fd
);
9128 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9129 gen_store_fpr32(ctx
, fp2
, fd
);
9130 tcg_temp_free_i32(fp2
);
9131 tcg_temp_free_i32(fp1
);
9132 tcg_temp_free_i32(fp0
);
9136 check_insn(ctx
, ISA_MIPS32R6
);
9138 TCGv_i32 fp0
= tcg_temp_new_i32();
9139 TCGv_i32 fp1
= tcg_temp_new_i32();
9140 TCGv_i32 fp2
= tcg_temp_new_i32();
9141 gen_load_fpr32(ctx
, fp0
, fs
);
9142 gen_load_fpr32(ctx
, fp1
, ft
);
9143 gen_load_fpr32(ctx
, fp2
, fd
);
9144 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9145 gen_store_fpr32(ctx
, fp2
, fd
);
9146 tcg_temp_free_i32(fp2
);
9147 tcg_temp_free_i32(fp1
);
9148 tcg_temp_free_i32(fp0
);
9152 check_insn(ctx
, ISA_MIPS32R6
);
9154 TCGv_i32 fp0
= tcg_temp_new_i32();
9155 gen_load_fpr32(ctx
, fp0
, fs
);
9156 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
9157 gen_store_fpr32(ctx
, fp0
, fd
);
9158 tcg_temp_free_i32(fp0
);
9162 check_insn(ctx
, ISA_MIPS32R6
);
9164 TCGv_i32 fp0
= tcg_temp_new_i32();
9165 gen_load_fpr32(ctx
, fp0
, fs
);
9166 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
9167 gen_store_fpr32(ctx
, fp0
, fd
);
9168 tcg_temp_free_i32(fp0
);
9171 case OPC_MIN_S
: /* OPC_RECIP2_S */
9172 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9174 TCGv_i32 fp0
= tcg_temp_new_i32();
9175 TCGv_i32 fp1
= tcg_temp_new_i32();
9176 TCGv_i32 fp2
= tcg_temp_new_i32();
9177 gen_load_fpr32(ctx
, fp0
, fs
);
9178 gen_load_fpr32(ctx
, fp1
, ft
);
9179 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
9180 gen_store_fpr32(ctx
, fp2
, fd
);
9181 tcg_temp_free_i32(fp2
);
9182 tcg_temp_free_i32(fp1
);
9183 tcg_temp_free_i32(fp0
);
9186 check_cp1_64bitmode(ctx
);
9188 TCGv_i32 fp0
= tcg_temp_new_i32();
9189 TCGv_i32 fp1
= tcg_temp_new_i32();
9191 gen_load_fpr32(ctx
, fp0
, fs
);
9192 gen_load_fpr32(ctx
, fp1
, ft
);
9193 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
9194 tcg_temp_free_i32(fp1
);
9195 gen_store_fpr32(ctx
, fp0
, fd
);
9196 tcg_temp_free_i32(fp0
);
9200 case OPC_MINA_S
: /* OPC_RECIP1_S */
9201 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9203 TCGv_i32 fp0
= tcg_temp_new_i32();
9204 TCGv_i32 fp1
= tcg_temp_new_i32();
9205 TCGv_i32 fp2
= tcg_temp_new_i32();
9206 gen_load_fpr32(ctx
, fp0
, fs
);
9207 gen_load_fpr32(ctx
, fp1
, ft
);
9208 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
9209 gen_store_fpr32(ctx
, fp2
, fd
);
9210 tcg_temp_free_i32(fp2
);
9211 tcg_temp_free_i32(fp1
);
9212 tcg_temp_free_i32(fp0
);
9215 check_cp1_64bitmode(ctx
);
9217 TCGv_i32 fp0
= tcg_temp_new_i32();
9219 gen_load_fpr32(ctx
, fp0
, fs
);
9220 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
9221 gen_store_fpr32(ctx
, fp0
, fd
);
9222 tcg_temp_free_i32(fp0
);
9226 case OPC_MAX_S
: /* OPC_RSQRT1_S */
9227 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9229 TCGv_i32 fp0
= tcg_temp_new_i32();
9230 TCGv_i32 fp1
= tcg_temp_new_i32();
9231 gen_load_fpr32(ctx
, fp0
, fs
);
9232 gen_load_fpr32(ctx
, fp1
, ft
);
9233 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
9234 gen_store_fpr32(ctx
, fp1
, fd
);
9235 tcg_temp_free_i32(fp1
);
9236 tcg_temp_free_i32(fp0
);
9239 check_cp1_64bitmode(ctx
);
9241 TCGv_i32 fp0
= tcg_temp_new_i32();
9243 gen_load_fpr32(ctx
, fp0
, fs
);
9244 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
9245 gen_store_fpr32(ctx
, fp0
, fd
);
9246 tcg_temp_free_i32(fp0
);
9250 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
9251 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9253 TCGv_i32 fp0
= tcg_temp_new_i32();
9254 TCGv_i32 fp1
= tcg_temp_new_i32();
9255 gen_load_fpr32(ctx
, fp0
, fs
);
9256 gen_load_fpr32(ctx
, fp1
, ft
);
9257 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
9258 gen_store_fpr32(ctx
, fp1
, fd
);
9259 tcg_temp_free_i32(fp1
);
9260 tcg_temp_free_i32(fp0
);
9263 check_cp1_64bitmode(ctx
);
9265 TCGv_i32 fp0
= tcg_temp_new_i32();
9266 TCGv_i32 fp1
= tcg_temp_new_i32();
9268 gen_load_fpr32(ctx
, fp0
, fs
);
9269 gen_load_fpr32(ctx
, fp1
, ft
);
9270 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
9271 tcg_temp_free_i32(fp1
);
9272 gen_store_fpr32(ctx
, fp0
, fd
);
9273 tcg_temp_free_i32(fp0
);
9278 check_cp1_registers(ctx
, fd
);
9280 TCGv_i32 fp32
= tcg_temp_new_i32();
9281 TCGv_i64 fp64
= tcg_temp_new_i64();
9283 gen_load_fpr32(ctx
, fp32
, fs
);
9284 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
9285 tcg_temp_free_i32(fp32
);
9286 gen_store_fpr64(ctx
, fp64
, fd
);
9287 tcg_temp_free_i64(fp64
);
9292 TCGv_i32 fp0
= tcg_temp_new_i32();
9294 gen_load_fpr32(ctx
, fp0
, fs
);
9296 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
9298 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
9300 gen_store_fpr32(ctx
, fp0
, fd
);
9301 tcg_temp_free_i32(fp0
);
9305 check_cp1_64bitmode(ctx
);
9307 TCGv_i32 fp32
= tcg_temp_new_i32();
9308 TCGv_i64 fp64
= tcg_temp_new_i64();
9310 gen_load_fpr32(ctx
, fp32
, fs
);
9312 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
9314 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
9316 tcg_temp_free_i32(fp32
);
9317 gen_store_fpr64(ctx
, fp64
, fd
);
9318 tcg_temp_free_i64(fp64
);
9324 TCGv_i64 fp64
= tcg_temp_new_i64();
9325 TCGv_i32 fp32_0
= tcg_temp_new_i32();
9326 TCGv_i32 fp32_1
= tcg_temp_new_i32();
9328 gen_load_fpr32(ctx
, fp32_0
, fs
);
9329 gen_load_fpr32(ctx
, fp32_1
, ft
);
9330 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
9331 tcg_temp_free_i32(fp32_1
);
9332 tcg_temp_free_i32(fp32_0
);
9333 gen_store_fpr64(ctx
, fp64
, fd
);
9334 tcg_temp_free_i64(fp64
);
9346 case OPC_CMP_NGLE_S
:
9353 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9354 if (ctx
->opcode
& (1 << 6)) {
9355 gen_cmpabs_s(ctx
, func
-48, ft
, fs
, cc
);
9357 gen_cmp_s(ctx
, func
-48, ft
, fs
, cc
);
9361 check_cp1_registers(ctx
, fs
| ft
| fd
);
9363 TCGv_i64 fp0
= tcg_temp_new_i64();
9364 TCGv_i64 fp1
= tcg_temp_new_i64();
9366 gen_load_fpr64(ctx
, fp0
, fs
);
9367 gen_load_fpr64(ctx
, fp1
, ft
);
9368 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
9369 tcg_temp_free_i64(fp1
);
9370 gen_store_fpr64(ctx
, fp0
, fd
);
9371 tcg_temp_free_i64(fp0
);
9375 check_cp1_registers(ctx
, fs
| ft
| fd
);
9377 TCGv_i64 fp0
= tcg_temp_new_i64();
9378 TCGv_i64 fp1
= tcg_temp_new_i64();
9380 gen_load_fpr64(ctx
, fp0
, fs
);
9381 gen_load_fpr64(ctx
, fp1
, ft
);
9382 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
9383 tcg_temp_free_i64(fp1
);
9384 gen_store_fpr64(ctx
, fp0
, fd
);
9385 tcg_temp_free_i64(fp0
);
9389 check_cp1_registers(ctx
, fs
| ft
| fd
);
9391 TCGv_i64 fp0
= tcg_temp_new_i64();
9392 TCGv_i64 fp1
= tcg_temp_new_i64();
9394 gen_load_fpr64(ctx
, fp0
, fs
);
9395 gen_load_fpr64(ctx
, fp1
, ft
);
9396 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
9397 tcg_temp_free_i64(fp1
);
9398 gen_store_fpr64(ctx
, fp0
, fd
);
9399 tcg_temp_free_i64(fp0
);
9403 check_cp1_registers(ctx
, fs
| ft
| fd
);
9405 TCGv_i64 fp0
= tcg_temp_new_i64();
9406 TCGv_i64 fp1
= tcg_temp_new_i64();
9408 gen_load_fpr64(ctx
, fp0
, fs
);
9409 gen_load_fpr64(ctx
, fp1
, ft
);
9410 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
9411 tcg_temp_free_i64(fp1
);
9412 gen_store_fpr64(ctx
, fp0
, fd
);
9413 tcg_temp_free_i64(fp0
);
9417 check_cp1_registers(ctx
, fs
| fd
);
9419 TCGv_i64 fp0
= tcg_temp_new_i64();
9421 gen_load_fpr64(ctx
, fp0
, fs
);
9422 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
9423 gen_store_fpr64(ctx
, fp0
, fd
);
9424 tcg_temp_free_i64(fp0
);
9428 check_cp1_registers(ctx
, fs
| fd
);
9430 TCGv_i64 fp0
= tcg_temp_new_i64();
9432 gen_load_fpr64(ctx
, fp0
, fs
);
9434 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
9436 gen_helper_float_abs_d(fp0
, fp0
);
9438 gen_store_fpr64(ctx
, fp0
, fd
);
9439 tcg_temp_free_i64(fp0
);
9443 check_cp1_registers(ctx
, fs
| fd
);
9445 TCGv_i64 fp0
= tcg_temp_new_i64();
9447 gen_load_fpr64(ctx
, fp0
, fs
);
9448 gen_store_fpr64(ctx
, fp0
, fd
);
9449 tcg_temp_free_i64(fp0
);
9453 check_cp1_registers(ctx
, fs
| fd
);
9455 TCGv_i64 fp0
= tcg_temp_new_i64();
9457 gen_load_fpr64(ctx
, fp0
, fs
);
9459 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
9461 gen_helper_float_chs_d(fp0
, fp0
);
9463 gen_store_fpr64(ctx
, fp0
, fd
);
9464 tcg_temp_free_i64(fp0
);
9468 check_cp1_64bitmode(ctx
);
9470 TCGv_i64 fp0
= tcg_temp_new_i64();
9472 gen_load_fpr64(ctx
, fp0
, fs
);
9474 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
9476 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
9478 gen_store_fpr64(ctx
, fp0
, fd
);
9479 tcg_temp_free_i64(fp0
);
9483 check_cp1_64bitmode(ctx
);
9485 TCGv_i64 fp0
= tcg_temp_new_i64();
9487 gen_load_fpr64(ctx
, fp0
, fs
);
9489 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
9491 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
9493 gen_store_fpr64(ctx
, fp0
, fd
);
9494 tcg_temp_free_i64(fp0
);
9498 check_cp1_64bitmode(ctx
);
9500 TCGv_i64 fp0
= tcg_temp_new_i64();
9502 gen_load_fpr64(ctx
, fp0
, fs
);
9504 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
9506 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
9508 gen_store_fpr64(ctx
, fp0
, fd
);
9509 tcg_temp_free_i64(fp0
);
9513 check_cp1_64bitmode(ctx
);
9515 TCGv_i64 fp0
= tcg_temp_new_i64();
9517 gen_load_fpr64(ctx
, fp0
, fs
);
9519 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
9521 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
9523 gen_store_fpr64(ctx
, fp0
, fd
);
9524 tcg_temp_free_i64(fp0
);
9528 check_cp1_registers(ctx
, fs
);
9530 TCGv_i32 fp32
= tcg_temp_new_i32();
9531 TCGv_i64 fp64
= tcg_temp_new_i64();
9533 gen_load_fpr64(ctx
, fp64
, fs
);
9535 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
9537 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
9539 tcg_temp_free_i64(fp64
);
9540 gen_store_fpr32(ctx
, fp32
, fd
);
9541 tcg_temp_free_i32(fp32
);
9545 check_cp1_registers(ctx
, fs
);
9547 TCGv_i32 fp32
= tcg_temp_new_i32();
9548 TCGv_i64 fp64
= tcg_temp_new_i64();
9550 gen_load_fpr64(ctx
, fp64
, fs
);
9552 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
9554 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
9556 tcg_temp_free_i64(fp64
);
9557 gen_store_fpr32(ctx
, fp32
, fd
);
9558 tcg_temp_free_i32(fp32
);
9562 check_cp1_registers(ctx
, fs
);
9564 TCGv_i32 fp32
= tcg_temp_new_i32();
9565 TCGv_i64 fp64
= tcg_temp_new_i64();
9567 gen_load_fpr64(ctx
, fp64
, fs
);
9569 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
9571 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
9573 tcg_temp_free_i64(fp64
);
9574 gen_store_fpr32(ctx
, fp32
, fd
);
9575 tcg_temp_free_i32(fp32
);
9579 check_cp1_registers(ctx
, fs
);
9581 TCGv_i32 fp32
= tcg_temp_new_i32();
9582 TCGv_i64 fp64
= tcg_temp_new_i64();
9584 gen_load_fpr64(ctx
, fp64
, fs
);
9586 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
9588 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
9590 tcg_temp_free_i64(fp64
);
9591 gen_store_fpr32(ctx
, fp32
, fd
);
9592 tcg_temp_free_i32(fp32
);
9596 check_insn(ctx
, ISA_MIPS32R6
);
9597 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9600 check_insn(ctx
, ISA_MIPS32R6
);
9601 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9604 check_insn(ctx
, ISA_MIPS32R6
);
9605 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
9608 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9609 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
9612 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9614 TCGLabel
*l1
= gen_new_label();
9618 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
9620 fp0
= tcg_temp_new_i64();
9621 gen_load_fpr64(ctx
, fp0
, fs
);
9622 gen_store_fpr64(ctx
, fp0
, fd
);
9623 tcg_temp_free_i64(fp0
);
9628 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9630 TCGLabel
*l1
= gen_new_label();
9634 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
9635 fp0
= tcg_temp_new_i64();
9636 gen_load_fpr64(ctx
, fp0
, fs
);
9637 gen_store_fpr64(ctx
, fp0
, fd
);
9638 tcg_temp_free_i64(fp0
);
9644 check_cp1_registers(ctx
, fs
| fd
);
9646 TCGv_i64 fp0
= tcg_temp_new_i64();
9648 gen_load_fpr64(ctx
, fp0
, fs
);
9649 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
9650 gen_store_fpr64(ctx
, fp0
, fd
);
9651 tcg_temp_free_i64(fp0
);
9655 check_cp1_registers(ctx
, fs
| fd
);
9657 TCGv_i64 fp0
= tcg_temp_new_i64();
9659 gen_load_fpr64(ctx
, fp0
, fs
);
9660 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
9661 gen_store_fpr64(ctx
, fp0
, fd
);
9662 tcg_temp_free_i64(fp0
);
9666 check_insn(ctx
, ISA_MIPS32R6
);
9668 TCGv_i64 fp0
= tcg_temp_new_i64();
9669 TCGv_i64 fp1
= tcg_temp_new_i64();
9670 TCGv_i64 fp2
= tcg_temp_new_i64();
9671 gen_load_fpr64(ctx
, fp0
, fs
);
9672 gen_load_fpr64(ctx
, fp1
, ft
);
9673 gen_load_fpr64(ctx
, fp2
, fd
);
9674 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9675 gen_store_fpr64(ctx
, fp2
, fd
);
9676 tcg_temp_free_i64(fp2
);
9677 tcg_temp_free_i64(fp1
);
9678 tcg_temp_free_i64(fp0
);
9682 check_insn(ctx
, ISA_MIPS32R6
);
9684 TCGv_i64 fp0
= tcg_temp_new_i64();
9685 TCGv_i64 fp1
= tcg_temp_new_i64();
9686 TCGv_i64 fp2
= tcg_temp_new_i64();
9687 gen_load_fpr64(ctx
, fp0
, fs
);
9688 gen_load_fpr64(ctx
, fp1
, ft
);
9689 gen_load_fpr64(ctx
, fp2
, fd
);
9690 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
9691 gen_store_fpr64(ctx
, fp2
, fd
);
9692 tcg_temp_free_i64(fp2
);
9693 tcg_temp_free_i64(fp1
);
9694 tcg_temp_free_i64(fp0
);
9698 check_insn(ctx
, ISA_MIPS32R6
);
9700 TCGv_i64 fp0
= tcg_temp_new_i64();
9701 gen_load_fpr64(ctx
, fp0
, fs
);
9702 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
9703 gen_store_fpr64(ctx
, fp0
, fd
);
9704 tcg_temp_free_i64(fp0
);
9708 check_insn(ctx
, ISA_MIPS32R6
);
9710 TCGv_i64 fp0
= tcg_temp_new_i64();
9711 gen_load_fpr64(ctx
, fp0
, fs
);
9712 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
9713 gen_store_fpr64(ctx
, fp0
, fd
);
9714 tcg_temp_free_i64(fp0
);
9717 case OPC_MIN_D
: /* OPC_RECIP2_D */
9718 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9720 TCGv_i64 fp0
= tcg_temp_new_i64();
9721 TCGv_i64 fp1
= tcg_temp_new_i64();
9722 gen_load_fpr64(ctx
, fp0
, fs
);
9723 gen_load_fpr64(ctx
, fp1
, ft
);
9724 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
9725 gen_store_fpr64(ctx
, fp1
, fd
);
9726 tcg_temp_free_i64(fp1
);
9727 tcg_temp_free_i64(fp0
);
9730 check_cp1_64bitmode(ctx
);
9732 TCGv_i64 fp0
= tcg_temp_new_i64();
9733 TCGv_i64 fp1
= tcg_temp_new_i64();
9735 gen_load_fpr64(ctx
, fp0
, fs
);
9736 gen_load_fpr64(ctx
, fp1
, ft
);
9737 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
9738 tcg_temp_free_i64(fp1
);
9739 gen_store_fpr64(ctx
, fp0
, fd
);
9740 tcg_temp_free_i64(fp0
);
9744 case OPC_MINA_D
: /* OPC_RECIP1_D */
9745 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9747 TCGv_i64 fp0
= tcg_temp_new_i64();
9748 TCGv_i64 fp1
= tcg_temp_new_i64();
9749 gen_load_fpr64(ctx
, fp0
, fs
);
9750 gen_load_fpr64(ctx
, fp1
, ft
);
9751 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
9752 gen_store_fpr64(ctx
, fp1
, fd
);
9753 tcg_temp_free_i64(fp1
);
9754 tcg_temp_free_i64(fp0
);
9757 check_cp1_64bitmode(ctx
);
9759 TCGv_i64 fp0
= tcg_temp_new_i64();
9761 gen_load_fpr64(ctx
, fp0
, fs
);
9762 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
9763 gen_store_fpr64(ctx
, fp0
, fd
);
9764 tcg_temp_free_i64(fp0
);
9768 case OPC_MAX_D
: /* OPC_RSQRT1_D */
9769 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9771 TCGv_i64 fp0
= tcg_temp_new_i64();
9772 TCGv_i64 fp1
= tcg_temp_new_i64();
9773 gen_load_fpr64(ctx
, fp0
, fs
);
9774 gen_load_fpr64(ctx
, fp1
, ft
);
9775 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
9776 gen_store_fpr64(ctx
, fp1
, fd
);
9777 tcg_temp_free_i64(fp1
);
9778 tcg_temp_free_i64(fp0
);
9781 check_cp1_64bitmode(ctx
);
9783 TCGv_i64 fp0
= tcg_temp_new_i64();
9785 gen_load_fpr64(ctx
, fp0
, fs
);
9786 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
9787 gen_store_fpr64(ctx
, fp0
, fd
);
9788 tcg_temp_free_i64(fp0
);
9792 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
9793 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
9795 TCGv_i64 fp0
= tcg_temp_new_i64();
9796 TCGv_i64 fp1
= tcg_temp_new_i64();
9797 gen_load_fpr64(ctx
, fp0
, fs
);
9798 gen_load_fpr64(ctx
, fp1
, ft
);
9799 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
9800 gen_store_fpr64(ctx
, fp1
, fd
);
9801 tcg_temp_free_i64(fp1
);
9802 tcg_temp_free_i64(fp0
);
9805 check_cp1_64bitmode(ctx
);
9807 TCGv_i64 fp0
= tcg_temp_new_i64();
9808 TCGv_i64 fp1
= tcg_temp_new_i64();
9810 gen_load_fpr64(ctx
, fp0
, fs
);
9811 gen_load_fpr64(ctx
, fp1
, ft
);
9812 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
9813 tcg_temp_free_i64(fp1
);
9814 gen_store_fpr64(ctx
, fp0
, fd
);
9815 tcg_temp_free_i64(fp0
);
9828 case OPC_CMP_NGLE_D
:
9835 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
9836 if (ctx
->opcode
& (1 << 6)) {
9837 gen_cmpabs_d(ctx
, func
-48, ft
, fs
, cc
);
9839 gen_cmp_d(ctx
, func
-48, ft
, fs
, cc
);
9843 check_cp1_registers(ctx
, fs
);
9845 TCGv_i32 fp32
= tcg_temp_new_i32();
9846 TCGv_i64 fp64
= tcg_temp_new_i64();
9848 gen_load_fpr64(ctx
, fp64
, fs
);
9849 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
9850 tcg_temp_free_i64(fp64
);
9851 gen_store_fpr32(ctx
, fp32
, fd
);
9852 tcg_temp_free_i32(fp32
);
9856 check_cp1_registers(ctx
, fs
);
9858 TCGv_i32 fp32
= tcg_temp_new_i32();
9859 TCGv_i64 fp64
= tcg_temp_new_i64();
9861 gen_load_fpr64(ctx
, fp64
, fs
);
9863 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
9865 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
9867 tcg_temp_free_i64(fp64
);
9868 gen_store_fpr32(ctx
, fp32
, fd
);
9869 tcg_temp_free_i32(fp32
);
9873 check_cp1_64bitmode(ctx
);
9875 TCGv_i64 fp0
= tcg_temp_new_i64();
9877 gen_load_fpr64(ctx
, fp0
, fs
);
9879 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
9881 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
9883 gen_store_fpr64(ctx
, fp0
, fd
);
9884 tcg_temp_free_i64(fp0
);
9889 TCGv_i32 fp0
= tcg_temp_new_i32();
9891 gen_load_fpr32(ctx
, fp0
, fs
);
9892 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
9893 gen_store_fpr32(ctx
, fp0
, fd
);
9894 tcg_temp_free_i32(fp0
);
9898 check_cp1_registers(ctx
, fd
);
9900 TCGv_i32 fp32
= tcg_temp_new_i32();
9901 TCGv_i64 fp64
= tcg_temp_new_i64();
9903 gen_load_fpr32(ctx
, fp32
, fs
);
9904 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
9905 tcg_temp_free_i32(fp32
);
9906 gen_store_fpr64(ctx
, fp64
, fd
);
9907 tcg_temp_free_i64(fp64
);
9911 check_cp1_64bitmode(ctx
);
9913 TCGv_i32 fp32
= tcg_temp_new_i32();
9914 TCGv_i64 fp64
= tcg_temp_new_i64();
9916 gen_load_fpr64(ctx
, fp64
, fs
);
9917 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
9918 tcg_temp_free_i64(fp64
);
9919 gen_store_fpr32(ctx
, fp32
, fd
);
9920 tcg_temp_free_i32(fp32
);
9924 check_cp1_64bitmode(ctx
);
9926 TCGv_i64 fp0
= tcg_temp_new_i64();
9928 gen_load_fpr64(ctx
, fp0
, fs
);
9929 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
9930 gen_store_fpr64(ctx
, fp0
, fd
);
9931 tcg_temp_free_i64(fp0
);
9937 TCGv_i64 fp0
= tcg_temp_new_i64();
9939 gen_load_fpr64(ctx
, fp0
, fs
);
9940 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
9941 gen_store_fpr64(ctx
, fp0
, fd
);
9942 tcg_temp_free_i64(fp0
);
9948 TCGv_i64 fp0
= tcg_temp_new_i64();
9949 TCGv_i64 fp1
= tcg_temp_new_i64();
9951 gen_load_fpr64(ctx
, fp0
, fs
);
9952 gen_load_fpr64(ctx
, fp1
, ft
);
9953 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
9954 tcg_temp_free_i64(fp1
);
9955 gen_store_fpr64(ctx
, fp0
, fd
);
9956 tcg_temp_free_i64(fp0
);
9962 TCGv_i64 fp0
= tcg_temp_new_i64();
9963 TCGv_i64 fp1
= tcg_temp_new_i64();
9965 gen_load_fpr64(ctx
, fp0
, fs
);
9966 gen_load_fpr64(ctx
, fp1
, ft
);
9967 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
9968 tcg_temp_free_i64(fp1
);
9969 gen_store_fpr64(ctx
, fp0
, fd
);
9970 tcg_temp_free_i64(fp0
);
9976 TCGv_i64 fp0
= tcg_temp_new_i64();
9977 TCGv_i64 fp1
= tcg_temp_new_i64();
9979 gen_load_fpr64(ctx
, fp0
, fs
);
9980 gen_load_fpr64(ctx
, fp1
, ft
);
9981 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
9982 tcg_temp_free_i64(fp1
);
9983 gen_store_fpr64(ctx
, fp0
, fd
);
9984 tcg_temp_free_i64(fp0
);
9990 TCGv_i64 fp0
= tcg_temp_new_i64();
9992 gen_load_fpr64(ctx
, fp0
, fs
);
9993 gen_helper_float_abs_ps(fp0
, fp0
);
9994 gen_store_fpr64(ctx
, fp0
, fd
);
9995 tcg_temp_free_i64(fp0
);
10001 TCGv_i64 fp0
= tcg_temp_new_i64();
10003 gen_load_fpr64(ctx
, fp0
, fs
);
10004 gen_store_fpr64(ctx
, fp0
, fd
);
10005 tcg_temp_free_i64(fp0
);
10011 TCGv_i64 fp0
= tcg_temp_new_i64();
10013 gen_load_fpr64(ctx
, fp0
, fs
);
10014 gen_helper_float_chs_ps(fp0
, fp0
);
10015 gen_store_fpr64(ctx
, fp0
, fd
);
10016 tcg_temp_free_i64(fp0
);
10021 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
10026 TCGLabel
*l1
= gen_new_label();
10030 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
10031 fp0
= tcg_temp_new_i64();
10032 gen_load_fpr64(ctx
, fp0
, fs
);
10033 gen_store_fpr64(ctx
, fp0
, fd
);
10034 tcg_temp_free_i64(fp0
);
10041 TCGLabel
*l1
= gen_new_label();
10045 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
10046 fp0
= tcg_temp_new_i64();
10047 gen_load_fpr64(ctx
, fp0
, fs
);
10048 gen_store_fpr64(ctx
, fp0
, fd
);
10049 tcg_temp_free_i64(fp0
);
10057 TCGv_i64 fp0
= tcg_temp_new_i64();
10058 TCGv_i64 fp1
= tcg_temp_new_i64();
10060 gen_load_fpr64(ctx
, fp0
, ft
);
10061 gen_load_fpr64(ctx
, fp1
, fs
);
10062 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
10063 tcg_temp_free_i64(fp1
);
10064 gen_store_fpr64(ctx
, fp0
, fd
);
10065 tcg_temp_free_i64(fp0
);
10071 TCGv_i64 fp0
= tcg_temp_new_i64();
10072 TCGv_i64 fp1
= tcg_temp_new_i64();
10074 gen_load_fpr64(ctx
, fp0
, ft
);
10075 gen_load_fpr64(ctx
, fp1
, fs
);
10076 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
10077 tcg_temp_free_i64(fp1
);
10078 gen_store_fpr64(ctx
, fp0
, fd
);
10079 tcg_temp_free_i64(fp0
);
10082 case OPC_RECIP2_PS
:
10085 TCGv_i64 fp0
= tcg_temp_new_i64();
10086 TCGv_i64 fp1
= tcg_temp_new_i64();
10088 gen_load_fpr64(ctx
, fp0
, fs
);
10089 gen_load_fpr64(ctx
, fp1
, ft
);
10090 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
10091 tcg_temp_free_i64(fp1
);
10092 gen_store_fpr64(ctx
, fp0
, fd
);
10093 tcg_temp_free_i64(fp0
);
10096 case OPC_RECIP1_PS
:
10099 TCGv_i64 fp0
= tcg_temp_new_i64();
10101 gen_load_fpr64(ctx
, fp0
, fs
);
10102 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
10103 gen_store_fpr64(ctx
, fp0
, fd
);
10104 tcg_temp_free_i64(fp0
);
10107 case OPC_RSQRT1_PS
:
10110 TCGv_i64 fp0
= tcg_temp_new_i64();
10112 gen_load_fpr64(ctx
, fp0
, fs
);
10113 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
10114 gen_store_fpr64(ctx
, fp0
, fd
);
10115 tcg_temp_free_i64(fp0
);
10118 case OPC_RSQRT2_PS
:
10121 TCGv_i64 fp0
= tcg_temp_new_i64();
10122 TCGv_i64 fp1
= tcg_temp_new_i64();
10124 gen_load_fpr64(ctx
, fp0
, fs
);
10125 gen_load_fpr64(ctx
, fp1
, ft
);
10126 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
10127 tcg_temp_free_i64(fp1
);
10128 gen_store_fpr64(ctx
, fp0
, fd
);
10129 tcg_temp_free_i64(fp0
);
10133 check_cp1_64bitmode(ctx
);
10135 TCGv_i32 fp0
= tcg_temp_new_i32();
10137 gen_load_fpr32h(ctx
, fp0
, fs
);
10138 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
10139 gen_store_fpr32(ctx
, fp0
, fd
);
10140 tcg_temp_free_i32(fp0
);
10143 case OPC_CVT_PW_PS
:
10146 TCGv_i64 fp0
= tcg_temp_new_i64();
10148 gen_load_fpr64(ctx
, fp0
, fs
);
10149 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
10150 gen_store_fpr64(ctx
, fp0
, fd
);
10151 tcg_temp_free_i64(fp0
);
10155 check_cp1_64bitmode(ctx
);
10157 TCGv_i32 fp0
= tcg_temp_new_i32();
10159 gen_load_fpr32(ctx
, fp0
, fs
);
10160 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
10161 gen_store_fpr32(ctx
, fp0
, fd
);
10162 tcg_temp_free_i32(fp0
);
10168 TCGv_i32 fp0
= tcg_temp_new_i32();
10169 TCGv_i32 fp1
= tcg_temp_new_i32();
10171 gen_load_fpr32(ctx
, fp0
, fs
);
10172 gen_load_fpr32(ctx
, fp1
, ft
);
10173 gen_store_fpr32h(ctx
, fp0
, fd
);
10174 gen_store_fpr32(ctx
, fp1
, fd
);
10175 tcg_temp_free_i32(fp0
);
10176 tcg_temp_free_i32(fp1
);
10182 TCGv_i32 fp0
= tcg_temp_new_i32();
10183 TCGv_i32 fp1
= tcg_temp_new_i32();
10185 gen_load_fpr32(ctx
, fp0
, fs
);
10186 gen_load_fpr32h(ctx
, fp1
, ft
);
10187 gen_store_fpr32(ctx
, fp1
, fd
);
10188 gen_store_fpr32h(ctx
, fp0
, fd
);
10189 tcg_temp_free_i32(fp0
);
10190 tcg_temp_free_i32(fp1
);
10196 TCGv_i32 fp0
= tcg_temp_new_i32();
10197 TCGv_i32 fp1
= tcg_temp_new_i32();
10199 gen_load_fpr32h(ctx
, fp0
, fs
);
10200 gen_load_fpr32(ctx
, fp1
, ft
);
10201 gen_store_fpr32(ctx
, fp1
, fd
);
10202 gen_store_fpr32h(ctx
, fp0
, fd
);
10203 tcg_temp_free_i32(fp0
);
10204 tcg_temp_free_i32(fp1
);
10210 TCGv_i32 fp0
= tcg_temp_new_i32();
10211 TCGv_i32 fp1
= tcg_temp_new_i32();
10213 gen_load_fpr32h(ctx
, fp0
, fs
);
10214 gen_load_fpr32h(ctx
, fp1
, ft
);
10215 gen_store_fpr32(ctx
, fp1
, fd
);
10216 gen_store_fpr32h(ctx
, fp0
, fd
);
10217 tcg_temp_free_i32(fp0
);
10218 tcg_temp_free_i32(fp1
);
10222 case OPC_CMP_UN_PS
:
10223 case OPC_CMP_EQ_PS
:
10224 case OPC_CMP_UEQ_PS
:
10225 case OPC_CMP_OLT_PS
:
10226 case OPC_CMP_ULT_PS
:
10227 case OPC_CMP_OLE_PS
:
10228 case OPC_CMP_ULE_PS
:
10229 case OPC_CMP_SF_PS
:
10230 case OPC_CMP_NGLE_PS
:
10231 case OPC_CMP_SEQ_PS
:
10232 case OPC_CMP_NGL_PS
:
10233 case OPC_CMP_LT_PS
:
10234 case OPC_CMP_NGE_PS
:
10235 case OPC_CMP_LE_PS
:
10236 case OPC_CMP_NGT_PS
:
10237 if (ctx
->opcode
& (1 << 6)) {
10238 gen_cmpabs_ps(ctx
, func
-48, ft
, fs
, cc
);
10240 gen_cmp_ps(ctx
, func
-48, ft
, fs
, cc
);
10244 MIPS_INVAL("farith");
10245 generate_exception_end(ctx
, EXCP_RI
);
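/*
 * Illustrative sketch (not part of the original file): nearly every
 * two-operand case in gen_farith above follows the same shape, shown
 * here for single precision.  The helper takes cpu_env so the softfloat
 * code behind it can observe the rounding mode and raise IEEE
 * exceptions into FCSR; only the helper name varies from case to case
 * (add/sub/mul/div/...), with float_add_s standing in below.
 */
static inline void gen_farith_binary_s_pattern_sketch(DisasContext *ctx,
                                                      int ft, int fs, int fd)
{
    TCGv_i32 fp0 = tcg_temp_new_i32();
    TCGv_i32 fp1 = tcg_temp_new_i32();

    gen_load_fpr32(ctx, fp0, fs);              /* first source operand */
    gen_load_fpr32(ctx, fp1, ft);              /* second source operand */
    /* Stand-in for gen_helper_float_<op>_s(fp0, cpu_env, fp0, fp1). */
    gen_helper_float_add_s(fp0, cpu_env, fp0, fp1);
    tcg_temp_free_i32(fp1);
    gen_store_fpr32(ctx, fp0, fd);             /* write the destination */
    tcg_temp_free_i32(fp0);
}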
/* Coprocessor 3 (FPU) */
static void gen_flt3_ldst (DisasContext *ctx, uint32_t opc,
                           int fd, int fs, int base, int index)
{
    TCGv t0 = tcg_temp_new();

    if (base == 0) {
        gen_load_gpr(t0, index);
    } else if (index == 0) {
        gen_load_gpr(t0, base);
    } else {
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[index]);
    }
    /* Don't do NOP if destination is zero: we must perform the actual
       memory access. */
    switch (opc) {
    case OPC_LWXC1:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();

            tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
            tcg_gen_trunc_tl_i32(fp0, t0);
            gen_store_fpr32(ctx, fp0, fd);
            tcg_temp_free_i32(fp0);
        }
        break;
    case OPC_LDXC1:
        check_cp1_registers(ctx, fd);
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        }
        break;
    case OPC_LUXC1:
        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();

            tcg_gen_qemu_ld_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            gen_store_fpr64(ctx, fp0, fd);
            tcg_temp_free_i64(fp0);
        }
        break;
    case OPC_SWXC1:
        {
            TCGv_i32 fp0 = tcg_temp_new_i32();
            gen_load_fpr32(ctx, fp0, fs);
            tcg_gen_qemu_st_i32(fp0, t0, ctx->mem_idx, MO_TEUL);
            tcg_temp_free_i32(fp0);
        }
        break;
    case OPC_SDXC1:
        check_cp1_registers(ctx, fs);
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            tcg_temp_free_i64(fp0);
        }
        break;
    case OPC_SUXC1:
        check_cp1_64bitmode(ctx);
        tcg_gen_andi_tl(t0, t0, ~0x7);
        {
            TCGv_i64 fp0 = tcg_temp_new_i64();
            gen_load_fpr64(ctx, fp0, fs);
            tcg_gen_qemu_st_i64(fp0, t0, ctx->mem_idx, MO_TEQ);
            tcg_temp_free_i64(fp0);
        }
        break;
    }
    tcg_temp_free(t0);
}
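/*
 * Illustrative sketch (not part of the original file): the indexed
 * address for the LWXC1/LDXC1/SWXC1/SDXC1 family above is simply
 * GPR[base] + GPR[index], with the zero register short-circuited.  The
 * "unaligned" LUXC1/SUXC1 variants additionally clear the low three
 * address bits, which is what tcg_gen_andi_tl(t0, t0, ~0x7) implements
 * above.  The helper below is hypothetical.
 */
static inline void gen_flt3_index_addr_sketch(DisasContext *ctx, TCGv t0,
                                              int base, int index,
                                              bool align_to_dword)
{
    if (base == 0) {
        gen_load_gpr(t0, index);        /* base is $zero */
    } else if (index == 0) {
        gen_load_gpr(t0, base);         /* index is $zero */
    } else {
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[index]);
    }
    if (align_to_dword) {
        tcg_gen_andi_tl(t0, t0, ~0x7);  /* LUXC1/SUXC1: drop low 3 bits */
    }
}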
10331 static void gen_flt3_arith (DisasContext
*ctx
, uint32_t opc
,
10332 int fd
, int fr
, int fs
, int ft
)
10338 TCGv t0
= tcg_temp_local_new();
10339 TCGv_i32 fp
= tcg_temp_new_i32();
10340 TCGv_i32 fph
= tcg_temp_new_i32();
10341 TCGLabel
*l1
= gen_new_label();
10342 TCGLabel
*l2
= gen_new_label();
10344 gen_load_gpr(t0
, fr
);
10345 tcg_gen_andi_tl(t0
, t0
, 0x7);
10347 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
10348 gen_load_fpr32(ctx
, fp
, fs
);
10349 gen_load_fpr32h(ctx
, fph
, fs
);
10350 gen_store_fpr32(ctx
, fp
, fd
);
10351 gen_store_fpr32h(ctx
, fph
, fd
);
10354 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
10356 #ifdef TARGET_WORDS_BIGENDIAN
10357 gen_load_fpr32(ctx
, fp
, fs
);
10358 gen_load_fpr32h(ctx
, fph
, ft
);
10359 gen_store_fpr32h(ctx
, fp
, fd
);
10360 gen_store_fpr32(ctx
, fph
, fd
);
10362 gen_load_fpr32h(ctx
, fph
, fs
);
10363 gen_load_fpr32(ctx
, fp
, ft
);
10364 gen_store_fpr32(ctx
, fph
, fd
);
10365 gen_store_fpr32h(ctx
, fp
, fd
);
10368 tcg_temp_free_i32(fp
);
10369 tcg_temp_free_i32(fph
);
10375 TCGv_i32 fp0
= tcg_temp_new_i32();
10376 TCGv_i32 fp1
= tcg_temp_new_i32();
10377 TCGv_i32 fp2
= tcg_temp_new_i32();
10379 gen_load_fpr32(ctx
, fp0
, fs
);
10380 gen_load_fpr32(ctx
, fp1
, ft
);
10381 gen_load_fpr32(ctx
, fp2
, fr
);
10382 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10383 tcg_temp_free_i32(fp0
);
10384 tcg_temp_free_i32(fp1
);
10385 gen_store_fpr32(ctx
, fp2
, fd
);
10386 tcg_temp_free_i32(fp2
);
10391 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10393 TCGv_i64 fp0
= tcg_temp_new_i64();
10394 TCGv_i64 fp1
= tcg_temp_new_i64();
10395 TCGv_i64 fp2
= tcg_temp_new_i64();
10397 gen_load_fpr64(ctx
, fp0
, fs
);
10398 gen_load_fpr64(ctx
, fp1
, ft
);
10399 gen_load_fpr64(ctx
, fp2
, fr
);
10400 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10401 tcg_temp_free_i64(fp0
);
10402 tcg_temp_free_i64(fp1
);
10403 gen_store_fpr64(ctx
, fp2
, fd
);
10404 tcg_temp_free_i64(fp2
);
10410 TCGv_i64 fp0
= tcg_temp_new_i64();
10411 TCGv_i64 fp1
= tcg_temp_new_i64();
10412 TCGv_i64 fp2
= tcg_temp_new_i64();
10414 gen_load_fpr64(ctx
, fp0
, fs
);
10415 gen_load_fpr64(ctx
, fp1
, ft
);
10416 gen_load_fpr64(ctx
, fp2
, fr
);
10417 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10418 tcg_temp_free_i64(fp0
);
10419 tcg_temp_free_i64(fp1
);
10420 gen_store_fpr64(ctx
, fp2
, fd
);
10421 tcg_temp_free_i64(fp2
);
10427 TCGv_i32 fp0
= tcg_temp_new_i32();
10428 TCGv_i32 fp1
= tcg_temp_new_i32();
10429 TCGv_i32 fp2
= tcg_temp_new_i32();
10431 gen_load_fpr32(ctx
, fp0
, fs
);
10432 gen_load_fpr32(ctx
, fp1
, ft
);
10433 gen_load_fpr32(ctx
, fp2
, fr
);
10434 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10435 tcg_temp_free_i32(fp0
);
10436 tcg_temp_free_i32(fp1
);
10437 gen_store_fpr32(ctx
, fp2
, fd
);
10438 tcg_temp_free_i32(fp2
);
10443 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10445 TCGv_i64 fp0
= tcg_temp_new_i64();
10446 TCGv_i64 fp1
= tcg_temp_new_i64();
10447 TCGv_i64 fp2
= tcg_temp_new_i64();
10449 gen_load_fpr64(ctx
, fp0
, fs
);
10450 gen_load_fpr64(ctx
, fp1
, ft
);
10451 gen_load_fpr64(ctx
, fp2
, fr
);
10452 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10453 tcg_temp_free_i64(fp0
);
10454 tcg_temp_free_i64(fp1
);
10455 gen_store_fpr64(ctx
, fp2
, fd
);
10456 tcg_temp_free_i64(fp2
);
10462 TCGv_i64 fp0
= tcg_temp_new_i64();
10463 TCGv_i64 fp1
= tcg_temp_new_i64();
10464 TCGv_i64 fp2
= tcg_temp_new_i64();
10466 gen_load_fpr64(ctx
, fp0
, fs
);
10467 gen_load_fpr64(ctx
, fp1
, ft
);
10468 gen_load_fpr64(ctx
, fp2
, fr
);
10469 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10470 tcg_temp_free_i64(fp0
);
10471 tcg_temp_free_i64(fp1
);
10472 gen_store_fpr64(ctx
, fp2
, fd
);
10473 tcg_temp_free_i64(fp2
);
10479 TCGv_i32 fp0
= tcg_temp_new_i32();
10480 TCGv_i32 fp1
= tcg_temp_new_i32();
10481 TCGv_i32 fp2
= tcg_temp_new_i32();
10483 gen_load_fpr32(ctx
, fp0
, fs
);
10484 gen_load_fpr32(ctx
, fp1
, ft
);
10485 gen_load_fpr32(ctx
, fp2
, fr
);
10486 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10487 tcg_temp_free_i32(fp0
);
10488 tcg_temp_free_i32(fp1
);
10489 gen_store_fpr32(ctx
, fp2
, fd
);
10490 tcg_temp_free_i32(fp2
);
10495 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10497 TCGv_i64 fp0
= tcg_temp_new_i64();
10498 TCGv_i64 fp1
= tcg_temp_new_i64();
10499 TCGv_i64 fp2
= tcg_temp_new_i64();
10501 gen_load_fpr64(ctx
, fp0
, fs
);
10502 gen_load_fpr64(ctx
, fp1
, ft
);
10503 gen_load_fpr64(ctx
, fp2
, fr
);
10504 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10505 tcg_temp_free_i64(fp0
);
10506 tcg_temp_free_i64(fp1
);
10507 gen_store_fpr64(ctx
, fp2
, fd
);
10508 tcg_temp_free_i64(fp2
);
10514 TCGv_i64 fp0
= tcg_temp_new_i64();
10515 TCGv_i64 fp1
= tcg_temp_new_i64();
10516 TCGv_i64 fp2
= tcg_temp_new_i64();
10518 gen_load_fpr64(ctx
, fp0
, fs
);
10519 gen_load_fpr64(ctx
, fp1
, ft
);
10520 gen_load_fpr64(ctx
, fp2
, fr
);
10521 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10522 tcg_temp_free_i64(fp0
);
10523 tcg_temp_free_i64(fp1
);
10524 gen_store_fpr64(ctx
, fp2
, fd
);
10525 tcg_temp_free_i64(fp2
);
10531 TCGv_i32 fp0
= tcg_temp_new_i32();
10532 TCGv_i32 fp1
= tcg_temp_new_i32();
10533 TCGv_i32 fp2
= tcg_temp_new_i32();
10535 gen_load_fpr32(ctx
, fp0
, fs
);
10536 gen_load_fpr32(ctx
, fp1
, ft
);
10537 gen_load_fpr32(ctx
, fp2
, fr
);
10538 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10539 tcg_temp_free_i32(fp0
);
10540 tcg_temp_free_i32(fp1
);
10541 gen_store_fpr32(ctx
, fp2
, fd
);
10542 tcg_temp_free_i32(fp2
);
10547 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
10549 TCGv_i64 fp0
= tcg_temp_new_i64();
10550 TCGv_i64 fp1
= tcg_temp_new_i64();
10551 TCGv_i64 fp2
= tcg_temp_new_i64();
10553 gen_load_fpr64(ctx
, fp0
, fs
);
10554 gen_load_fpr64(ctx
, fp1
, ft
);
10555 gen_load_fpr64(ctx
, fp2
, fr
);
10556 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10557 tcg_temp_free_i64(fp0
);
10558 tcg_temp_free_i64(fp1
);
10559 gen_store_fpr64(ctx
, fp2
, fd
);
10560 tcg_temp_free_i64(fp2
);
10566 TCGv_i64 fp0
= tcg_temp_new_i64();
10567 TCGv_i64 fp1
= tcg_temp_new_i64();
10568 TCGv_i64 fp2
= tcg_temp_new_i64();
10570 gen_load_fpr64(ctx
, fp0
, fs
);
10571 gen_load_fpr64(ctx
, fp1
, ft
);
10572 gen_load_fpr64(ctx
, fp2
, fr
);
10573 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
10574 tcg_temp_free_i64(fp0
);
10575 tcg_temp_free_i64(fp1
);
10576 gen_store_fpr64(ctx
, fp2
, fd
);
10577 tcg_temp_free_i64(fp2
);
10581 MIPS_INVAL("flt3_arith");
10582 generate_exception_end(ctx
, EXCP_RI
);
static void gen_rdhwr(DisasContext *ctx, int rt, int rd, int sel)
{
    TCGv t0;

#if !defined(CONFIG_USER_ONLY)
    /* The Linux kernel will emulate rdhwr if it's not supported natively.
       Therefore only check the ISA in system mode.  */
    check_insn(ctx, ISA_MIPS32R2);
#endif
    t0 = tcg_temp_new();
10600 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
10601 gen_store_gpr(t0
, rt
);
10604 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
10605 gen_store_gpr(t0
, rt
);
10608 gen_helper_rdhwr_cc(t0
, cpu_env
);
10609 gen_store_gpr(t0
, rt
);
10612 gen_helper_rdhwr_ccres(t0
, cpu_env
);
10613 gen_store_gpr(t0
, rt
);
10616 check_insn(ctx
, ISA_MIPS32R6
);
10618 /* Performance counter registers are not implemented other than
10619 * control register 0.
10621 generate_exception(ctx
, EXCP_RI
);
10623 gen_helper_rdhwr_performance(t0
, cpu_env
);
10624 gen_store_gpr(t0
, rt
);
10627 check_insn(ctx
, ISA_MIPS32R6
);
10628 gen_helper_rdhwr_xnp(t0
, cpu_env
);
10629 gen_store_gpr(t0
, rt
);
10632 #if defined(CONFIG_USER_ONLY)
10633 tcg_gen_ld_tl(t0
, cpu_env
,
10634 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10635 gen_store_gpr(t0
, rt
);
10638 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
10639 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
10640 tcg_gen_ld_tl(t0
, cpu_env
,
10641 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
10642 gen_store_gpr(t0
, rt
);
10644 generate_exception_end(ctx
, EXCP_RI
);
10648 default: /* Invalid */
10649 MIPS_INVAL("rdhwr");
10650 generate_exception_end(ctx
, EXCP_RI
);
static inline void clear_branch_hflags(DisasContext *ctx)
{
    ctx->hflags &= ~MIPS_HFLAG_BMASK;
    if (ctx->bstate == BS_NONE) {
        save_cpu_state(ctx, 0);
    } else {
        /* it is not safe to save ctx->hflags as hflags may be changed
           in execution time by the instruction in delay / forbidden slot. */
        tcg_gen_andi_i32(hflags, hflags, ~MIPS_HFLAG_BMASK);
    }
}
static void gen_branch(DisasContext *ctx, int insn_bytes)
{
    if (ctx->hflags & MIPS_HFLAG_BMASK) {
        int proc_hflags = ctx->hflags & MIPS_HFLAG_BMASK;
        /* Branches completion */
        clear_branch_hflags(ctx);
        ctx->bstate = BS_BRANCH;
        /* FIXME: Need to clear can_do_io.  */
        switch (proc_hflags & MIPS_HFLAG_BMASK_BASE) {
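        /*
         * MIPS_HFLAG_BMASK_BASE selects how the pending branch completes:
         * FBNSLOT simply falls through past the forbidden slot, B and BL
         * jump unconditionally to btarget, BC tests bcond at run time, and
         * BR jumps through a register (also updating the ISA-mode bit for
         * MIPS16/microMIPS targets).
         */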
10677 case MIPS_HFLAG_FBNSLOT
:
10678 gen_goto_tb(ctx
, 0, ctx
->pc
+ insn_bytes
);
10681 /* unconditional branch */
10682 if (proc_hflags
& MIPS_HFLAG_BX
) {
10683 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
10685 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10687 case MIPS_HFLAG_BL
:
10688 /* blikely taken case */
10689 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10691 case MIPS_HFLAG_BC
:
10692 /* Conditional branch */
10694 TCGLabel
*l1
= gen_new_label();
10696 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
10697 gen_goto_tb(ctx
, 1, ctx
->pc
+ insn_bytes
);
10699 gen_goto_tb(ctx
, 0, ctx
->btarget
);
10702 case MIPS_HFLAG_BR
:
10703 /* unconditional branch to register */
10704 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
10705 TCGv t0
= tcg_temp_new();
10706 TCGv_i32 t1
= tcg_temp_new_i32();
10708 tcg_gen_andi_tl(t0
, btarget
, 0x1);
10709 tcg_gen_trunc_tl_i32(t1
, t0
);
10711 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
10712 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
10713 tcg_gen_or_i32(hflags
, hflags
, t1
);
10714 tcg_temp_free_i32(t1
);
10716 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
10718 tcg_gen_mov_tl(cpu_PC
, btarget
);
10720 if (ctx
->singlestep_enabled
) {
10721 save_cpu_state(ctx
, 0);
10722 gen_helper_raise_exception_debug(cpu_env
);
10724 tcg_gen_exit_tb(0);
10727 fprintf(stderr
, "unknown branch 0x%x\n", proc_hflags
);
/* Compact Branches */
static void gen_compute_compact_branch(DisasContext *ctx, uint32_t opc,
                                       int rs, int rt, int32_t offset)
{
    int bcond_compute = 0;
    TCGv t0 = tcg_temp_new();
    TCGv t1 = tcg_temp_new();
    int m16_lowbit = (ctx->hflags & MIPS_HFLAG_M16) != 0;
10742 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10743 #ifdef MIPS_DEBUG_DISAS
10744 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10747 generate_exception_end(ctx
, EXCP_RI
);
10751 /* Load needed operands and calculate btarget */
10753 /* compact branch */
10754 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
10755 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
10756 gen_load_gpr(t0
, rs
);
10757 gen_load_gpr(t1
, rt
);
10759 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10760 if (rs
<= rt
&& rs
== 0) {
10761 /* OPC_BEQZALC, OPC_BNEZALC */
10762 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10765 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10766 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10767 gen_load_gpr(t0
, rs
);
10768 gen_load_gpr(t1
, rt
);
10770 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10772 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10773 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10774 if (rs
== 0 || rs
== rt
) {
10775 /* OPC_BLEZALC, OPC_BGEZALC */
10776 /* OPC_BGTZALC, OPC_BLTZALC */
10777 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10779 gen_load_gpr(t0
, rs
);
10780 gen_load_gpr(t1
, rt
);
10782 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10786 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10791 /* OPC_BEQZC, OPC_BNEZC */
10792 gen_load_gpr(t0
, rs
);
10794 ctx
->btarget
= addr_add(ctx
, ctx
->pc
+ 4, offset
);
10796 /* OPC_JIC, OPC_JIALC */
10797 TCGv tbase
= tcg_temp_new();
10798 TCGv toffset
= tcg_temp_new();
10800 gen_load_gpr(tbase
, rt
);
10801 tcg_gen_movi_tl(toffset
, offset
);
10802 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
10803 tcg_temp_free(tbase
);
10804 tcg_temp_free(toffset
);
10808 MIPS_INVAL("Compact branch/jump");
10809 generate_exception_end(ctx
, EXCP_RI
);
10813 if (bcond_compute
== 0) {
10814 /* Uncoditional compact branch */
10817 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10820 ctx
->hflags
|= MIPS_HFLAG_BR
;
10823 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->pc
+ 4 + m16_lowbit
);
10826 ctx
->hflags
|= MIPS_HFLAG_B
;
10829 MIPS_INVAL("Compact branch/jump");
10830 generate_exception_end(ctx
, EXCP_RI
);
10834 /* Generating branch here as compact branches don't have delay slot */
10835 gen_branch(ctx
, 4);
10837 /* Conditional compact branch */
10838 TCGLabel
*fs
= gen_new_label();
10839 save_cpu_state(ctx
, 0);
10842 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
10843 if (rs
== 0 && rt
!= 0) {
10845 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10846 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10848 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10851 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
10854 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
10855 if (rs
== 0 && rt
!= 0) {
10857 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10858 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10860 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10863 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
10866 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
10867 if (rs
== 0 && rt
!= 0) {
10869 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
10870 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10872 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
10875 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
10878 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
10879 if (rs
== 0 && rt
!= 0) {
10881 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
10882 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
10884 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
10887 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
        case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC */
        case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
            if (rs >= rt) {
                /* OPC_BOVC, OPC_BNVC */
                TCGv t2 = tcg_temp_new();
                TCGv t3 = tcg_temp_new();
                TCGv t4 = tcg_temp_new();
                TCGv input_overflow = tcg_temp_new();

                gen_load_gpr(t0, rs);
                gen_load_gpr(t1, rt);
                tcg_gen_ext32s_tl(t2, t0);
                tcg_gen_setcond_tl(TCG_COND_NE, input_overflow, t2, t0);
                tcg_gen_ext32s_tl(t3, t1);
                tcg_gen_setcond_tl(TCG_COND_NE, t4, t3, t1);
                tcg_gen_or_tl(input_overflow, input_overflow, t4);

                tcg_gen_add_tl(t4, t2, t3);
                tcg_gen_ext32s_tl(t4, t4);
                tcg_gen_xor_tl(t2, t2, t3);
                tcg_gen_xor_tl(t3, t4, t3);
                tcg_gen_andc_tl(t2, t3, t2);
                tcg_gen_setcondi_tl(TCG_COND_LT, t4, t2, 0);
                tcg_gen_or_tl(t4, t4, input_overflow);
                if (opc == OPC_BOVC) {
                    /* OPC_BOVC */
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE), t4, 0, fs);
                } else {
                    /* OPC_BNVC */
                    tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ), t4, 0, fs);
                }
                tcg_temp_free(input_overflow);
10925 } else if (rs
< rt
&& rs
== 0) {
10926 /* OPC_BEQZALC, OPC_BNEZALC */
10927 if (opc
== OPC_BEQZALC
) {
10929 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
10932 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
10935 /* OPC_BEQC, OPC_BNEC */
10936 if (opc
== OPC_BEQC
) {
10938 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
10941 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
10946 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
10949 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
10952 MIPS_INVAL("Compact conditional branch/jump");
10953 generate_exception_end(ctx
, EXCP_RI
);
10957 /* Generating branch here as compact branches don't have delay slot */
10958 gen_goto_tb(ctx
, 1, ctx
->btarget
);
10961 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
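/*
 * Illustrative sketch only (hypothetical helper, not used by the translator):
 * the BOVC/BNVC sequence above detects 32-bit signed-add overflow without
 * branching.  Operands that are not valid sign-extended 32-bit values already
 * count as "input overflow"; for the addition itself it uses the sign test
 * (sum ^ b) & ~(a ^ b) < 0.  The same logic in plain C, assuming 64-bit
 * registers:
 */
static inline int bovc_overflows_example(long long rs_val, long long rt_val)
{
    long long a = (int32_t)rs_val;       /* sign-extended 32-bit copies */
    long long b = (int32_t)rt_val;
    int input_overflow = (a != rs_val) || (b != rt_val);
    long long sum = (int32_t)(a + b);    /* truncated, re-sign-extended sum */
    /* Overflow iff both operands share a sign that the sum does not keep. */
    int add_overflow = ((sum ^ b) & ~(a ^ b)) < 0;

    return add_overflow || input_overflow;
}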
10969 /* ISA extensions (ASEs) */
10970 /* MIPS16 extension to MIPS32 */
10972 /* MIPS16 major opcodes */
10974 M16_OPC_ADDIUSP
= 0x00,
10975 M16_OPC_ADDIUPC
= 0x01,
10977 M16_OPC_JAL
= 0x03,
10978 M16_OPC_BEQZ
= 0x04,
10979 M16_OPC_BNEQZ
= 0x05,
10980 M16_OPC_SHIFT
= 0x06,
10982 M16_OPC_RRIA
= 0x08,
10983 M16_OPC_ADDIU8
= 0x09,
10984 M16_OPC_SLTI
= 0x0a,
10985 M16_OPC_SLTIU
= 0x0b,
10988 M16_OPC_CMPI
= 0x0e,
10992 M16_OPC_LWSP
= 0x12,
10994 M16_OPC_LBU
= 0x14,
10995 M16_OPC_LHU
= 0x15,
10996 M16_OPC_LWPC
= 0x16,
10997 M16_OPC_LWU
= 0x17,
11000 M16_OPC_SWSP
= 0x1a,
11002 M16_OPC_RRR
= 0x1c,
11004 M16_OPC_EXTEND
= 0x1e,
11008 /* I8 funct field */
11027 /* RR funct field */
11061 /* I64 funct field */
11069 I64_DADDIUPC
= 0x6,
/* RR ry field for CNVT */
enum {
    RR_RY_CNVT_ZEB = 0x0,
    RR_RY_CNVT_ZEH = 0x1,
    RR_RY_CNVT_ZEW = 0x2,
    RR_RY_CNVT_SEB = 0x4,
    RR_RY_CNVT_SEH = 0x5,
    RR_RY_CNVT_SEW = 0x6,
};
static int xlat (int r)
{
    static int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
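/*
 * Illustrative sketch only (hypothetical helper): MIPS16 register fields are
 * three bits wide, so xlat() maps the encoded values 0..7 onto the eight
 * architectural registers MIPS16 can name ($16, $17, $2..$7).  Extracting
 * the rx field of a 16-bit instruction word the same way the decoders below
 * do would look like:
 */
static inline int mips16_rx_example(unsigned int insn16)
{
    return xlat((insn16 >> 8) & 0x7);   /* e.g. an rx field of 0 selects GPR 16 */
}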
11090 static void gen_mips16_save (DisasContext
*ctx
,
11091 int xsregs
, int aregs
,
11092 int do_ra
, int do_s0
, int do_s1
,
11095 TCGv t0
= tcg_temp_new();
11096 TCGv t1
= tcg_temp_new();
11097 TCGv t2
= tcg_temp_new();
11127 generate_exception_end(ctx
, EXCP_RI
);
11133 gen_base_offset_addr(ctx
, t0
, 29, 12);
11134 gen_load_gpr(t1
, 7);
11135 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11138 gen_base_offset_addr(ctx
, t0
, 29, 8);
11139 gen_load_gpr(t1
, 6);
11140 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11143 gen_base_offset_addr(ctx
, t0
, 29, 4);
11144 gen_load_gpr(t1
, 5);
11145 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11148 gen_base_offset_addr(ctx
, t0
, 29, 0);
11149 gen_load_gpr(t1
, 4);
11150 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
11153 gen_load_gpr(t0
, 29);
#define DECR_AND_STORE(reg) do {                                 \
        tcg_gen_movi_tl(t2, -4);                                 \
        gen_op_addr_add(ctx, t0, t0, t2);                        \
        gen_load_gpr(t1, reg);                                   \
        tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL);       \
    } while (0)
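/* Each DECR_AND_STORE(reg) pre-decrements the working address in t0 by four
   bytes and stores the 32-bit value of GPR "reg" there, so successive
   invocations walk the save area downwards; $29 itself is only adjusted by
   the full frame size once all the stores have been emitted. */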
11163 DECR_AND_STORE(31);
11168 DECR_AND_STORE(30);
11171 DECR_AND_STORE(23);
11174 DECR_AND_STORE(22);
11177 DECR_AND_STORE(21);
11180 DECR_AND_STORE(20);
11183 DECR_AND_STORE(19);
11186 DECR_AND_STORE(18);
11190 DECR_AND_STORE(17);
11193 DECR_AND_STORE(16);
11223 generate_exception_end(ctx
, EXCP_RI
);
11239 #undef DECR_AND_STORE
11241 tcg_gen_movi_tl(t2
, -framesize
);
11242 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11248 static void gen_mips16_restore (DisasContext
*ctx
,
11249 int xsregs
, int aregs
,
11250 int do_ra
, int do_s0
, int do_s1
,
11254 TCGv t0
= tcg_temp_new();
11255 TCGv t1
= tcg_temp_new();
11256 TCGv t2
= tcg_temp_new();
11258 tcg_gen_movi_tl(t2
, framesize
);
11259 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
#define DECR_AND_LOAD(reg) do {                                  \
        tcg_gen_movi_tl(t2, -4);                                 \
        gen_op_addr_add(ctx, t0, t0, t2);                        \
        tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL);       \
        gen_store_gpr(t1, reg);                                  \
    } while (0)
11329 generate_exception_end(ctx
, EXCP_RI
);
11345 #undef DECR_AND_LOAD
11347 tcg_gen_movi_tl(t2
, framesize
);
11348 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
11354 static void gen_addiupc (DisasContext
*ctx
, int rx
, int imm
,
11355 int is_64_bit
, int extended
)
11359 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11360 generate_exception_end(ctx
, EXCP_RI
);
11364 t0
= tcg_temp_new();
11366 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
11367 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
11369 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
11375 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
11378 TCGv_i32 t0
= tcg_const_i32(op
);
11379 TCGv t1
= tcg_temp_new();
11380 gen_base_offset_addr(ctx
, t1
, base
, offset
);
11381 gen_helper_cache(cpu_env
, t1
, t0
);
11384 #if defined(TARGET_MIPS64)
11385 static void decode_i64_mips16 (DisasContext
*ctx
,
11386 int ry
, int funct
, int16_t offset
,
11391 check_insn(ctx
, ISA_MIPS3
);
11392 check_mips_64(ctx
);
11393 offset
= extended
? offset
: offset
<< 3;
11394 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
11397 check_insn(ctx
, ISA_MIPS3
);
11398 check_mips_64(ctx
);
11399 offset
= extended
? offset
: offset
<< 3;
11400 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
11403 check_insn(ctx
, ISA_MIPS3
);
11404 check_mips_64(ctx
);
11405 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
11406 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
11409 check_insn(ctx
, ISA_MIPS3
);
11410 check_mips_64(ctx
);
11411 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
11412 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
11415 check_insn(ctx
, ISA_MIPS3
);
11416 check_mips_64(ctx
);
11417 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
11418 generate_exception_end(ctx
, EXCP_RI
);
11420 offset
= extended
? offset
: offset
<< 3;
11421 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
11425 check_insn(ctx
, ISA_MIPS3
);
11426 check_mips_64(ctx
);
11427 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
11428 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
11431 check_insn(ctx
, ISA_MIPS3
);
11432 check_mips_64(ctx
);
11433 offset
= extended
? offset
: offset
<< 2;
11434 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
11437 check_insn(ctx
, ISA_MIPS3
);
11438 check_mips_64(ctx
);
11439 offset
= extended
? offset
: offset
<< 2;
11440 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
11446 static int decode_extended_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11448 int extend
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11449 int op
, rx
, ry
, funct
, sa
;
11450 int16_t imm
, offset
;
11452 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
11453 op
= (ctx
->opcode
>> 11) & 0x1f;
11454 sa
= (ctx
->opcode
>> 22) & 0x1f;
11455 funct
= (ctx
->opcode
>> 8) & 0x7;
11456 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11457 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11458 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
11459 | ((ctx
->opcode
>> 21) & 0x3f) << 5
11460 | (ctx
->opcode
& 0x1f));
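    /* The EXTEND prefix (now in the upper halfword of ctx->opcode) supplies
       imm[15:11] in its bits 4:0 and imm[10:5] in its bits 10:5, while the
       base instruction keeps imm[4:0]; the int16_t cast then sign-extends
       the reassembled value. */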
11462 /* The extended opcodes cleverly reuse the opcodes from their 16-bit
11465 case M16_OPC_ADDIUSP
:
11466 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11468 case M16_OPC_ADDIUPC
:
11469 gen_addiupc(ctx
, rx
, imm
, 0, 1);
11472 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
11473 /* No delay slot, so just process as a normal instruction */
11476 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
11477 /* No delay slot, so just process as a normal instruction */
11479 case M16_OPC_BNEQZ
:
11480 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
11481 /* No delay slot, so just process as a normal instruction */
11483 case M16_OPC_SHIFT
:
11484 switch (ctx
->opcode
& 0x3) {
11486 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11489 #if defined(TARGET_MIPS64)
11490 check_mips_64(ctx
);
11491 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11493 generate_exception_end(ctx
, EXCP_RI
);
11497 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11500 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11504 #if defined(TARGET_MIPS64)
11506 check_insn(ctx
, ISA_MIPS3
);
11507 check_mips_64(ctx
);
11508 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
11512 imm
= ctx
->opcode
& 0xf;
11513 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
11514 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
11515 imm
= (int16_t) (imm
<< 1) >> 1;
11516 if ((ctx
->opcode
>> 4) & 0x1) {
11517 #if defined(TARGET_MIPS64)
11518 check_mips_64(ctx
);
11519 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11521 generate_exception_end(ctx
, EXCP_RI
);
11524 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11527 case M16_OPC_ADDIU8
:
11528 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11531 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11533 case M16_OPC_SLTIU
:
11534 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11539 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
11542 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
11545 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
11548 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
11551 check_insn(ctx
, ISA_MIPS32
);
11553 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
11554 int aregs
= (ctx
->opcode
>> 16) & 0xf;
11555 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
11556 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
11557 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
11558 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
11559 | (ctx
->opcode
& 0xf)) << 3;
11561 if (ctx
->opcode
& (1 << 7)) {
11562 gen_mips16_save(ctx
, xsregs
, aregs
,
11563 do_ra
, do_s0
, do_s1
,
11566 gen_mips16_restore(ctx
, xsregs
, aregs
,
11567 do_ra
, do_s0
, do_s1
,
11573 generate_exception_end(ctx
, EXCP_RI
);
11578 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
11581 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
11583 #if defined(TARGET_MIPS64)
11585 check_insn(ctx
, ISA_MIPS3
);
11586 check_mips_64(ctx
);
11587 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
11591 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11594 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
11597 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
11600 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
11603 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11606 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
11609 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
11611 #if defined(TARGET_MIPS64)
11613 check_insn(ctx
, ISA_MIPS3
);
11614 check_mips_64(ctx
);
11615 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
11619 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11622 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
11625 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
11628 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
11630 #if defined(TARGET_MIPS64)
11632 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
11636 generate_exception_end(ctx
, EXCP_RI
);
static inline bool is_uhi(int sdbbp_code)
{
#ifdef CONFIG_USER_ONLY
    return false;
#else
    return semihosting_enabled() && sdbbp_code == 1;
#endif
}
11652 static int decode_mips16_opc (CPUMIPSState
*env
, DisasContext
*ctx
)
11656 int op
, cnvt_op
, op1
, offset
;
11660 op
= (ctx
->opcode
>> 11) & 0x1f;
11661 sa
= (ctx
->opcode
>> 2) & 0x7;
11662 sa
= sa
== 0 ? 8 : sa
;
11663 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
11664 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
11665 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
11666 op1
= offset
= ctx
->opcode
& 0x1f;
11671 case M16_OPC_ADDIUSP
:
11673 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
11675 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
11678 case M16_OPC_ADDIUPC
:
11679 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
11682 offset
= (ctx
->opcode
& 0x7ff) << 1;
11683 offset
= (int16_t)(offset
<< 4) >> 4;
11684 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
11685 /* No delay slot, so just process as a normal instruction */
11688 offset
= cpu_lduw_code(env
, ctx
->pc
+ 2);
11689 offset
= (((ctx
->opcode
& 0x1f) << 21)
11690 | ((ctx
->opcode
>> 5) & 0x1f) << 16
11692 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
11693 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
11697 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
11698 ((int8_t)ctx
->opcode
) << 1, 0);
11699 /* No delay slot, so just process as a normal instruction */
11701 case M16_OPC_BNEQZ
:
11702 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
11703 ((int8_t)ctx
->opcode
) << 1, 0);
11704 /* No delay slot, so just process as a normal instruction */
11706 case M16_OPC_SHIFT
:
11707 switch (ctx
->opcode
& 0x3) {
11709 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
11712 #if defined(TARGET_MIPS64)
11713 check_insn(ctx
, ISA_MIPS3
);
11714 check_mips_64(ctx
);
11715 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
11717 generate_exception_end(ctx
, EXCP_RI
);
11721 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
11724 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
11728 #if defined(TARGET_MIPS64)
11730 check_insn(ctx
, ISA_MIPS3
);
11731 check_mips_64(ctx
);
11732 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
11737 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
11739 if ((ctx
->opcode
>> 4) & 1) {
11740 #if defined(TARGET_MIPS64)
11741 check_insn(ctx
, ISA_MIPS3
);
11742 check_mips_64(ctx
);
11743 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
11745 generate_exception_end(ctx
, EXCP_RI
);
11748 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
11752 case M16_OPC_ADDIU8
:
11754 int16_t imm
= (int8_t) ctx
->opcode
;
11756 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
11761 int16_t imm
= (uint8_t) ctx
->opcode
;
11762 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
11765 case M16_OPC_SLTIU
:
11767 int16_t imm
= (uint8_t) ctx
->opcode
;
11768 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
11775 funct
= (ctx
->opcode
>> 8) & 0x7;
11778 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
11779 ((int8_t)ctx
->opcode
) << 1, 0);
11782 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
11783 ((int8_t)ctx
->opcode
) << 1, 0);
11786 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
11789 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
11790 ((int8_t)ctx
->opcode
) << 3);
11793 check_insn(ctx
, ISA_MIPS32
);
11795 int do_ra
= ctx
->opcode
& (1 << 6);
11796 int do_s0
= ctx
->opcode
& (1 << 5);
11797 int do_s1
= ctx
->opcode
& (1 << 4);
11798 int framesize
= ctx
->opcode
& 0xf;
11800 if (framesize
== 0) {
11803 framesize
= framesize
<< 3;
11806 if (ctx
->opcode
& (1 << 7)) {
11807 gen_mips16_save(ctx
, 0, 0,
11808 do_ra
, do_s0
, do_s1
, framesize
);
11810 gen_mips16_restore(ctx
, 0, 0,
11811 do_ra
, do_s0
, do_s1
, framesize
);
11817 int rz
= xlat(ctx
->opcode
& 0x7);
11819 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
11820 ((ctx
->opcode
>> 5) & 0x7);
11821 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
11825 reg32
= ctx
->opcode
& 0x1f;
11826 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
11829 generate_exception_end(ctx
, EXCP_RI
);
11836 int16_t imm
= (uint8_t) ctx
->opcode
;
11838 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
11843 int16_t imm
= (uint8_t) ctx
->opcode
;
11844 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
11847 #if defined(TARGET_MIPS64)
11849 check_insn(ctx
, ISA_MIPS3
);
11850 check_mips_64(ctx
);
11851 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
11855 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
11858 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
11861 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11864 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
11867 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
11870 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
11873 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
11875 #if defined (TARGET_MIPS64)
11877 check_insn(ctx
, ISA_MIPS3
);
11878 check_mips_64(ctx
);
11879 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
11883 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
11886 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
11889 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
11892 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
11896 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
11899 switch (ctx
->opcode
& 0x3) {
11901 mips32_op
= OPC_ADDU
;
11904 mips32_op
= OPC_SUBU
;
11906 #if defined(TARGET_MIPS64)
11908 mips32_op
= OPC_DADDU
;
11909 check_insn(ctx
, ISA_MIPS3
);
11910 check_mips_64(ctx
);
11913 mips32_op
= OPC_DSUBU
;
11914 check_insn(ctx
, ISA_MIPS3
);
11915 check_mips_64(ctx
);
11919 generate_exception_end(ctx
, EXCP_RI
);
11923 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
11932 int nd
= (ctx
->opcode
>> 7) & 0x1;
11933 int link
= (ctx
->opcode
>> 6) & 0x1;
11934 int ra
= (ctx
->opcode
>> 5) & 0x1;
11937 check_insn(ctx
, ISA_MIPS32
);
11946 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
11951 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
11952 gen_helper_do_semihosting(cpu_env
);
11954 /* XXX: not clear which exception should be raised
11955 * when in debug mode...
11957 check_insn(ctx
, ISA_MIPS32
);
11958 generate_exception_end(ctx
, EXCP_DBp
);
11962 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
11965 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
11968 generate_exception_end(ctx
, EXCP_BREAK
);
11971 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
11974 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
11977 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
11979 #if defined (TARGET_MIPS64)
11981 check_insn(ctx
, ISA_MIPS3
);
11982 check_mips_64(ctx
);
11983 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
11987 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
11990 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
11993 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
11996 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
11999 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
12002 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
12005 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
12008 check_insn(ctx
, ISA_MIPS32
);
12010 case RR_RY_CNVT_ZEB
:
12011 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12013 case RR_RY_CNVT_ZEH
:
12014 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12016 case RR_RY_CNVT_SEB
:
12017 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12019 case RR_RY_CNVT_SEH
:
12020 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12022 #if defined (TARGET_MIPS64)
12023 case RR_RY_CNVT_ZEW
:
12024 check_insn(ctx
, ISA_MIPS64
);
12025 check_mips_64(ctx
);
12026 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12028 case RR_RY_CNVT_SEW
:
12029 check_insn(ctx
, ISA_MIPS64
);
12030 check_mips_64(ctx
);
12031 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
12035 generate_exception_end(ctx
, EXCP_RI
);
12040 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
12042 #if defined (TARGET_MIPS64)
12044 check_insn(ctx
, ISA_MIPS3
);
12045 check_mips_64(ctx
);
12046 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
12049 check_insn(ctx
, ISA_MIPS3
);
12050 check_mips_64(ctx
);
12051 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
12054 check_insn(ctx
, ISA_MIPS3
);
12055 check_mips_64(ctx
);
12056 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
12059 check_insn(ctx
, ISA_MIPS3
);
12060 check_mips_64(ctx
);
12061 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
12065 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
12068 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
12071 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
12074 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
12076 #if defined (TARGET_MIPS64)
12078 check_insn(ctx
, ISA_MIPS3
);
12079 check_mips_64(ctx
);
12080 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
12083 check_insn(ctx
, ISA_MIPS3
);
12084 check_mips_64(ctx
);
12085 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
12088 check_insn(ctx
, ISA_MIPS3
);
12089 check_mips_64(ctx
);
12090 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
12093 check_insn(ctx
, ISA_MIPS3
);
12094 check_mips_64(ctx
);
12095 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
12099 generate_exception_end(ctx
, EXCP_RI
);
12103 case M16_OPC_EXTEND
:
12104 decode_extended_mips16_opc(env
, ctx
);
12107 #if defined(TARGET_MIPS64)
12109 funct
= (ctx
->opcode
>> 8) & 0x7;
12110 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
12114 generate_exception_end(ctx
, EXCP_RI
);
12121 /* microMIPS extension to MIPS32/MIPS64 */
12124 * microMIPS32/microMIPS64 major opcodes
12126 * 1. MIPS Architecture for Programmers Volume II-B:
12127 * The microMIPS32 Instruction Set (Revision 3.05)
12129 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
12131 * 2. MIPS Architecture For Programmers Volume II-A:
12132 * The MIPS64 Instruction Set (Revision 3.51)
12162 POOL32S
= 0x16, /* MIPS64 */
12163 DADDIU32
= 0x17, /* MIPS64 */
12192 /* 0x29 is reserved */
12205 /* 0x31 is reserved */
12218 SD32
= 0x36, /* MIPS64 */
12219 LD32
= 0x37, /* MIPS64 */
12221 /* 0x39 is reserved */
12237 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
12247 /* POOL32A encoding of minor opcode field */
12250 /* These opcodes are distinguished only by bits 9..6; those bits are
12251 * what are recorded below. */
12288 /* The following can be distinguished by their lower 6 bits. */
12298 /* POOL32AXF encoding of minor opcode field extension */
12301 * 1. MIPS Architecture for Programmers Volume II-B:
12302 * The microMIPS32 Instruction Set (Revision 3.05)
12304 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
12306 * 2. MIPS Architecture for Programmers VolumeIV-e:
12307 * The MIPS DSP Application-Specific Extension
12308 * to the microMIPS32 Architecture (Revision 2.34)
12310 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
12325 /* begin of microMIPS32 DSP */
12327 /* bits 13..12 for 0x01 */
12333 /* bits 13..12 for 0x2a */
12339 /* bits 13..12 for 0x32 */
12343 /* end of microMIPS32 DSP */
12345 /* bits 15..12 for 0x2c */
12362 /* bits 15..12 for 0x34 */
12370 /* bits 15..12 for 0x3c */
12372 JR
= 0x0, /* alias */
12380 /* bits 15..12 for 0x05 */
12384 /* bits 15..12 for 0x0d */
12396 /* bits 15..12 for 0x15 */
12402 /* bits 15..12 for 0x1d */
12406 /* bits 15..12 for 0x2d */
12411 /* bits 15..12 for 0x35 */
12418 /* POOL32B encoding of minor opcode field (bits 15..12) */
12434 /* POOL32C encoding of minor opcode field (bits 15..12) */
12442 /* 0xa is reserved */
12449 /* 0x6 is reserved */
12455 /* POOL32F encoding of minor opcode field (bits 5..0) */
12458 /* These are the bit 7..6 values */
12467 /* These are the bit 8..6 values */
12492 MOVZ_FMT_05
= 0x05,
12526 CABS_COND_FMT
= 0x1c, /* MIPS3D */
12533 /* POOL32Fxf encoding of minor opcode extension field */
12571 /* POOL32I encoding of minor opcode field (bits 25..21) */
12601 /* These overlap and are distinguished by bit16 of the instruction */
12610 /* POOL16A encoding of minor opcode field */
12617 /* POOL16B encoding of minor opcode field */
12624 /* POOL16C encoding of minor opcode field */
12644 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
12664 /* POOL16D encoding of minor opcode field */
12671 /* POOL16E encoding of minor opcode field */
static int mmreg (int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}

/* Used for 16-bit store instructions. */
static int mmreg2 (int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
#define uMIPS_RD(op) ((op >> 7) & 0x7)
#define uMIPS_RS(op) ((op >> 4) & 0x7)
#define uMIPS_RS2(op) uMIPS_RS(op)
#define uMIPS_RS1(op) ((op >> 1) & 0x7)
#define uMIPS_RD5(op) ((op >> 5) & 0x1f)
#define uMIPS_RS5(op) (op & 0x1f)

/* Signed immediate */
#define SIMM(op, start, width)                                          \
    ((int32_t)(((op >> start) & ((~0U) >> (32-width)))                  \
               << (32-width))                                           \
     >> (32-width))
/* Zero-extended immediate */
#define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32-width)))
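/*
 * Illustrative sketch only (hypothetical helper, not used by the decoder):
 * SIMM() isolates a "width"-bit field starting at bit "start" and
 * sign-extends it by shifting it up to bit 31 and arithmetically back down.
 * For instance, a 4-bit field holding 0xe decodes to -2:
 */
static inline int simm_example(void)
{
    unsigned int op = 0xe << 1;   /* 4-bit immediate 0b1110 placed at bit 1 */
    return SIMM(op, 1, 4);        /* yields -2 */
}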
static void gen_addiur1sp(DisasContext *ctx)
{
    int rd = mmreg(uMIPS_RD(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, 29, ((ctx->opcode >> 1) & 0x3f) << 2);
}
static void gen_addiur2(DisasContext *ctx)
{
    static const int decoded_imm[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));

    gen_arith_imm(ctx, OPC_ADDIU, rd, rs, decoded_imm[ZIMM(ctx->opcode, 1, 3)]);
}
static void gen_addiusp(DisasContext *ctx)
{
    int encoded = ZIMM(ctx->opcode, 1, 9);
    int decoded;

    if (encoded <= 1) {
        decoded = 256 + encoded;
    } else if (encoded <= 255) {
        decoded = encoded;
    } else if (encoded <= 509) {
        decoded = encoded - 512;
    } else {
        decoded = encoded - 768;
    }

    gen_arith_imm(ctx, OPC_ADDIU, 29, 29, decoded << 2);
}
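/*
 * Illustrative sketch only (hypothetical helper): the 9-bit ADDIUSP field is
 * not a plain two's-complement immediate.  Encodings 0 and 1 mean +256 and
 * +257 words, 2..255 are small positive adjustments, 256..509 wrap around to
 * negatives, and 510/511 reach -258/-257.  The resulting $sp delta in bytes,
 * mirroring gen_addiusp() above:
 */
static inline int addiusp_delta_bytes_example(int encoded)
{
    int decoded = (encoded <= 1)   ? 256 + encoded
                : (encoded <= 255) ? encoded
                : (encoded <= 509) ? encoded - 512
                :                    encoded - 768;
    return decoded * 4;   /* e.g. encoded 0 -> +1024 bytes, 509 -> -12 bytes */
}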
static void gen_addius5(DisasContext *ctx)
{
    int imm = SIMM(ctx->opcode, 1, 4);
    int rd = (ctx->opcode >> 5) & 0x1f;

    gen_arith_imm(ctx, OPC_ADDIU, rd, rd, imm);
}
static void gen_andi16(DisasContext *ctx)
{
    static const int decoded_imm[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
                                       31, 32, 63, 64, 255, 32768, 65535 };
    int rd = mmreg(uMIPS_RD(ctx->opcode));
    int rs = mmreg(uMIPS_RS(ctx->opcode));
    int encoded = ZIMM(ctx->opcode, 0, 4);

    gen_logic_imm(ctx, OPC_ANDI, rd, rs, decoded_imm[encoded]);
}
12761 static void gen_ldst_multiple (DisasContext
*ctx
, uint32_t opc
, int reglist
,
12762 int base
, int16_t offset
)
12767 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12768 generate_exception_end(ctx
, EXCP_RI
);
12772 t0
= tcg_temp_new();
12774 gen_base_offset_addr(ctx
, t0
, base
, offset
);
12776 t1
= tcg_const_tl(reglist
);
12777 t2
= tcg_const_i32(ctx
->mem_idx
);
12779 save_cpu_state(ctx
, 1);
12782 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
12785 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
12787 #ifdef TARGET_MIPS64
12789 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
12792 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
12798 tcg_temp_free_i32(t2
);
12802 static void gen_pool16c_insn(DisasContext
*ctx
)
12804 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
12805 int rs
= mmreg(ctx
->opcode
& 0x7);
12807 switch (((ctx
->opcode
) >> 4) & 0x3f) {
12812 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
12818 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
12824 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
12830 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
12837 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12838 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12840 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
12849 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
12850 int offset
= ZIMM(ctx
->opcode
, 0, 4);
12852 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
12859 int reg
= ctx
->opcode
& 0x1f;
12861 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
12867 int reg
= ctx
->opcode
& 0x1f;
12868 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
12869 /* Let normal delay slot handling in our caller take us
12870 to the branch target. */
12875 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
12876 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12880 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
12881 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
12885 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
12889 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
12892 generate_exception_end(ctx
, EXCP_BREAK
);
12895 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
12896 gen_helper_do_semihosting(cpu_env
);
12898 /* XXX: not clear which exception should be raised
12899 * when in debug mode...
12901 check_insn(ctx
, ISA_MIPS32
);
12902 generate_exception_end(ctx
, EXCP_DBp
);
12905 case JRADDIUSP
+ 0:
12906 case JRADDIUSP
+ 1:
12908 int imm
= ZIMM(ctx
->opcode
, 0, 5);
12909 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12910 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12911 /* Let normal delay slot handling in our caller take us
12912 to the branch target. */
12916 generate_exception_end(ctx
, EXCP_RI
);
12921 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
12924 int rd
, rs
, re
, rt
;
12925 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
12926 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
12927 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
12928 rd
= rd_enc
[enc_dest
];
12929 re
= re_enc
[enc_dest
];
12930 rs
= rs_rt_enc
[enc_rs
];
12931 rt
= rs_rt_enc
[enc_rt
];
12933 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
12935 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
12938 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
12940 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
12944 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
12946 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
12947 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
12949 switch (ctx
->opcode
& 0xf) {
12951 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
12954 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
12958 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12959 int offset
= extract32(ctx
->opcode
, 4, 4);
12960 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
12963 case R6_JRC16
: /* JRCADDIUSP */
12964 if ((ctx
->opcode
>> 4) & 1) {
12966 int imm
= extract32(ctx
->opcode
, 5, 5);
12967 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
12968 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
12971 int rs
= extract32(ctx
->opcode
, 5, 5);
12972 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
12975 case MOVEP
... MOVEP_07
:
12976 case MOVEP_0C
... MOVEP_0F
:
12978 int enc_dest
= uMIPS_RD(ctx
->opcode
);
12979 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
12980 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
12981 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
12985 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
12988 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
12992 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
12993 int offset
= extract32(ctx
->opcode
, 4, 4);
12994 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
12997 case JALRC16
: /* BREAK16, SDBBP16 */
12998 switch (ctx
->opcode
& 0x3f) {
13000 case JALRC16
+ 0x20:
13002 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
13007 generate_exception(ctx
, EXCP_BREAK
);
13011 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
13012 gen_helper_do_semihosting(cpu_env
);
13014 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13015 generate_exception(ctx
, EXCP_RI
);
13017 generate_exception(ctx
, EXCP_DBp
);
13024 generate_exception(ctx
, EXCP_RI
);
13029 static void gen_ldxs (DisasContext
*ctx
, int base
, int index
, int rd
)
13031 TCGv t0
= tcg_temp_new();
13032 TCGv t1
= tcg_temp_new();
13034 gen_load_gpr(t0
, base
);
13037 gen_load_gpr(t1
, index
);
13038 tcg_gen_shli_tl(t1
, t1
, 2);
13039 gen_op_addr_add(ctx
, t0
, t1
, t0
);
13042 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13043 gen_store_gpr(t1
, rd
);
13049 static void gen_ldst_pair (DisasContext
*ctx
, uint32_t opc
, int rd
,
13050 int base
, int16_t offset
)
13054 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
13055 generate_exception_end(ctx
, EXCP_RI
);
13059 t0
= tcg_temp_new();
13060 t1
= tcg_temp_new();
13062 gen_base_offset_addr(ctx
, t0
, base
, offset
);
13067 generate_exception_end(ctx
, EXCP_RI
);
13070 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13071 gen_store_gpr(t1
, rd
);
13072 tcg_gen_movi_tl(t1
, 4);
13073 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13074 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
13075 gen_store_gpr(t1
, rd
+1);
13078 gen_load_gpr(t1
, rd
);
13079 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13080 tcg_gen_movi_tl(t1
, 4);
13081 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13082 gen_load_gpr(t1
, rd
+1);
13083 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13085 #ifdef TARGET_MIPS64
13088 generate_exception_end(ctx
, EXCP_RI
);
13091 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13092 gen_store_gpr(t1
, rd
);
13093 tcg_gen_movi_tl(t1
, 8);
13094 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13095 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13096 gen_store_gpr(t1
, rd
+1);
13099 gen_load_gpr(t1
, rd
);
13100 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13101 tcg_gen_movi_tl(t1
, 8);
13102 gen_op_addr_add(ctx
, t0
, t0
, t1
);
13103 gen_load_gpr(t1
, rd
+1);
13104 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
13112 static void gen_pool32axf (CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
13114 int extension
= (ctx
->opcode
>> 6) & 0x3f;
13115 int minor
= (ctx
->opcode
>> 12) & 0xf;
13116 uint32_t mips32_op
;
13118 switch (extension
) {
13120 mips32_op
= OPC_TEQ
;
13123 mips32_op
= OPC_TGE
;
13126 mips32_op
= OPC_TGEU
;
13129 mips32_op
= OPC_TLT
;
13132 mips32_op
= OPC_TLTU
;
13135 mips32_op
= OPC_TNE
;
13137 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
13139 #ifndef CONFIG_USER_ONLY
13142 check_cp0_enabled(ctx
);
13144 /* Treat as NOP. */
13147 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
13151 check_cp0_enabled(ctx
);
13153 TCGv t0
= tcg_temp_new();
13155 gen_load_gpr(t0
, rt
);
13156 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
13162 switch (minor
& 3) {
13164 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13167 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13170 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13173 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13176 goto pool32axf_invalid
;
13180 switch (minor
& 3) {
13182 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13185 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
13188 goto pool32axf_invalid
;
13194 check_insn(ctx
, ISA_MIPS32R6
);
13195 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
13198 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
13201 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
13204 mips32_op
= OPC_CLO
;
13207 mips32_op
= OPC_CLZ
;
13209 check_insn(ctx
, ISA_MIPS32
);
13210 gen_cl(ctx
, mips32_op
, rt
, rs
);
13213 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13214 gen_rdhwr(ctx
, rt
, rs
, 0);
13217 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
13220 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13221 mips32_op
= OPC_MULT
;
13224 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13225 mips32_op
= OPC_MULTU
;
13228 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13229 mips32_op
= OPC_DIV
;
13232 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13233 mips32_op
= OPC_DIVU
;
13236 check_insn(ctx
, ISA_MIPS32
);
13237 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13240 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13241 mips32_op
= OPC_MADD
;
13244 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13245 mips32_op
= OPC_MADDU
;
13248 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13249 mips32_op
= OPC_MSUB
;
13252 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13253 mips32_op
= OPC_MSUBU
;
13255 check_insn(ctx
, ISA_MIPS32
);
13256 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
13259 goto pool32axf_invalid
;
13270 generate_exception_err(ctx
, EXCP_CpU
, 2);
13273 goto pool32axf_invalid
;
13278 case JALR
: /* JALRC */
13279 case JALR_HB
: /* JALRC_HB */
13280 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
13281 /* JALRC, JALRC_HB */
13282 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
13284 /* JALR, JALR_HB */
13285 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
13286 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13291 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13292 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
13293 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
13296 goto pool32axf_invalid
;
13302 check_cp0_enabled(ctx
);
13303 check_insn(ctx
, ISA_MIPS32R2
);
13304 gen_load_srsgpr(rs
, rt
);
13307 check_cp0_enabled(ctx
);
13308 check_insn(ctx
, ISA_MIPS32R2
);
13309 gen_store_srsgpr(rs
, rt
);
13312 goto pool32axf_invalid
;
13315 #ifndef CONFIG_USER_ONLY
13319 mips32_op
= OPC_TLBP
;
13322 mips32_op
= OPC_TLBR
;
13325 mips32_op
= OPC_TLBWI
;
13328 mips32_op
= OPC_TLBWR
;
13331 mips32_op
= OPC_TLBINV
;
13334 mips32_op
= OPC_TLBINVF
;
13337 mips32_op
= OPC_WAIT
;
13340 mips32_op
= OPC_DERET
;
13343 mips32_op
= OPC_ERET
;
13345 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
13348 goto pool32axf_invalid
;
13354 check_cp0_enabled(ctx
);
13356 TCGv t0
= tcg_temp_new();
13358 save_cpu_state(ctx
, 1);
13359 gen_helper_di(t0
, cpu_env
);
13360 gen_store_gpr(t0
, rs
);
13361 /* Stop translation as we may have switched the execution mode */
13362 ctx
->bstate
= BS_STOP
;
13367 check_cp0_enabled(ctx
);
13369 TCGv t0
= tcg_temp_new();
13371 save_cpu_state(ctx
, 1);
13372 gen_helper_ei(t0
, cpu_env
);
13373 gen_store_gpr(t0
, rs
);
13374 /* Stop translation as we may have switched the execution mode */
13375 ctx
->bstate
= BS_STOP
;
13380 goto pool32axf_invalid
;
13390 generate_exception_end(ctx
, EXCP_SYSCALL
);
13393 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
13394 gen_helper_do_semihosting(cpu_env
);
13396 check_insn(ctx
, ISA_MIPS32
);
13397 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
13398 generate_exception_end(ctx
, EXCP_RI
);
13400 generate_exception_end(ctx
, EXCP_DBp
);
13405 goto pool32axf_invalid
;
13409 switch (minor
& 3) {
13411 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
13414 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
13417 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
13420 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
13423 goto pool32axf_invalid
;
13427 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
13430 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
13433 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
13436 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
13439 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
13442 goto pool32axf_invalid
;
13447 MIPS_INVAL("pool32axf");
13448 generate_exception_end(ctx
, EXCP_RI
);
13453 /* Values for microMIPS fmt field. Variable-width, depending on which
13454 formats the instruction supports. */
13473 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
13475 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
13476 uint32_t mips32_op
;
13478 #define FLOAT_1BIT_FMT(opc, fmt) (fmt << 8) | opc
13479 #define FLOAT_2BIT_FMT(opc, fmt) (fmt << 7) | opc
13480 #define COND_FLOAT_MOV(opc, cond) (cond << 7) | opc
13482 switch (extension
) {
13483 case FLOAT_1BIT_FMT(CFC1
, 0):
13484 mips32_op
= OPC_CFC1
;
13486 case FLOAT_1BIT_FMT(CTC1
, 0):
13487 mips32_op
= OPC_CTC1
;
13489 case FLOAT_1BIT_FMT(MFC1
, 0):
13490 mips32_op
= OPC_MFC1
;
13492 case FLOAT_1BIT_FMT(MTC1
, 0):
13493 mips32_op
= OPC_MTC1
;
13495 case FLOAT_1BIT_FMT(MFHC1
, 0):
13496 mips32_op
= OPC_MFHC1
;
13498 case FLOAT_1BIT_FMT(MTHC1
, 0):
13499 mips32_op
= OPC_MTHC1
;
13501 gen_cp1(ctx
, mips32_op
, rt
, rs
);
13504 /* Reciprocal square root */
13505 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
13506 mips32_op
= OPC_RSQRT_S
;
13508 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
13509 mips32_op
= OPC_RSQRT_D
;
13513 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
13514 mips32_op
= OPC_SQRT_S
;
13516 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
13517 mips32_op
= OPC_SQRT_D
;
13521 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
13522 mips32_op
= OPC_RECIP_S
;
13524 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
13525 mips32_op
= OPC_RECIP_D
;
13529 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
13530 mips32_op
= OPC_FLOOR_L_S
;
13532 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
13533 mips32_op
= OPC_FLOOR_L_D
;
13535 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
13536 mips32_op
= OPC_FLOOR_W_S
;
13538 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
13539 mips32_op
= OPC_FLOOR_W_D
;
13543 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
13544 mips32_op
= OPC_CEIL_L_S
;
13546 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
13547 mips32_op
= OPC_CEIL_L_D
;
13549 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
13550 mips32_op
= OPC_CEIL_W_S
;
13552 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
13553 mips32_op
= OPC_CEIL_W_D
;
13557 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
13558 mips32_op
= OPC_TRUNC_L_S
;
13560 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
13561 mips32_op
= OPC_TRUNC_L_D
;
13563 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
13564 mips32_op
= OPC_TRUNC_W_S
;
13566 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
13567 mips32_op
= OPC_TRUNC_W_D
;
13571 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
13572 mips32_op
= OPC_ROUND_L_S
;
13574 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
13575 mips32_op
= OPC_ROUND_L_D
;
13577 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
13578 mips32_op
= OPC_ROUND_W_S
;
13580 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
13581 mips32_op
= OPC_ROUND_W_D
;
13584 /* Integer to floating-point conversion */
13585 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
13586 mips32_op
= OPC_CVT_L_S
;
13588 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
13589 mips32_op
= OPC_CVT_L_D
;
13591 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
13592 mips32_op
= OPC_CVT_W_S
;
13594 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
13595 mips32_op
= OPC_CVT_W_D
;
13598 /* Paired-foo conversions */
13599 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
13600 mips32_op
= OPC_CVT_S_PL
;
13602 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
13603 mips32_op
= OPC_CVT_S_PU
;
13605 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
13606 mips32_op
= OPC_CVT_PW_PS
;
13608 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
13609 mips32_op
= OPC_CVT_PS_PW
;
13612 /* Floating-point moves */
13613 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
13614 mips32_op
= OPC_MOV_S
;
13616 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
13617 mips32_op
= OPC_MOV_D
;
13619 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
13620 mips32_op
= OPC_MOV_PS
;
13623 /* Absolute value */
13624 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
13625 mips32_op
= OPC_ABS_S
;
13627 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
13628 mips32_op
= OPC_ABS_D
;
13630 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
13631 mips32_op
= OPC_ABS_PS
;
13635 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
13636 mips32_op
= OPC_NEG_S
;
13638 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
13639 mips32_op
= OPC_NEG_D
;
13641 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
13642 mips32_op
= OPC_NEG_PS
;
    /* Reciprocal square root step */
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_S):
        mips32_op = OPC_RSQRT1_S;
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_D):
        mips32_op = OPC_RSQRT1_D;
    case FLOAT_2BIT_FMT(RSQRT1_FMT, FMT_SDPS_PS):
        mips32_op = OPC_RSQRT1_PS;
    /* Reciprocal step */
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_S):
        mips32_op = OPC_RECIP1_S;
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_D):
        mips32_op = OPC_RECIP1_D;
    case FLOAT_2BIT_FMT(RECIP1_FMT, FMT_SDPS_PS):
        mips32_op = OPC_RECIP1_PS;
    /* Conversions from double */
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_S):
        mips32_op = OPC_CVT_D_S;
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_W):
        mips32_op = OPC_CVT_D_W;
    case FLOAT_2BIT_FMT(CVT_D, FMT_SWL_L):
        mips32_op = OPC_CVT_D_L;
    /* Conversions from single */
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_D):
        mips32_op = OPC_CVT_S_D;
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_W):
        mips32_op = OPC_CVT_S_W;
    case FLOAT_2BIT_FMT(CVT_S, FMT_DWL_L):
        mips32_op = OPC_CVT_S_L;
        gen_farith(ctx, mips32_op, -1, rs, rt, 0);
    /* Conditional moves on floating-point codes */
    case COND_FLOAT_MOV(MOVT, 0):
    case COND_FLOAT_MOV(MOVT, 1):
    case COND_FLOAT_MOV(MOVT, 2):
    case COND_FLOAT_MOV(MOVT, 3):
    case COND_FLOAT_MOV(MOVT, 4):
    case COND_FLOAT_MOV(MOVT, 5):
    case COND_FLOAT_MOV(MOVT, 6):
    case COND_FLOAT_MOV(MOVT, 7):
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 1);
    case COND_FLOAT_MOV(MOVF, 0):
    case COND_FLOAT_MOV(MOVF, 1):
    case COND_FLOAT_MOV(MOVF, 2):
    case COND_FLOAT_MOV(MOVF, 3):
    case COND_FLOAT_MOV(MOVF, 4):
    case COND_FLOAT_MOV(MOVF, 5):
    case COND_FLOAT_MOV(MOVF, 6):
    case COND_FLOAT_MOV(MOVF, 7):
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_movci(ctx, rt, rs, (ctx->opcode >> 13) & 0x7, 0);
        MIPS_INVAL("pool32fxf");
        generate_exception_end(ctx, EXCP_RI);
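/*
 * Decode the 32-bit microMIPS formats: fetch the second halfword of the
 * instruction, merge it into ctx->opcode, then dispatch on the major opcode
 * (bits 31:26) and, for the POOL32x groups, on a per-pool "minor" field.
 */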
static void decode_micromips32_opc(CPUMIPSState *env, DisasContext *ctx)
{
    int rt, rs, rd, rr;
    uint32_t op, minor, mips32_op;
    uint32_t cond, fmt, cc;

    insn = cpu_lduw_code(env, ctx->pc + 2);
    ctx->opcode = (ctx->opcode << 16) | insn;

    rt = (ctx->opcode >> 21) & 0x1f;
    rs = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    rr = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t) ctx->opcode;

    op = (ctx->opcode >> 26) & 0x3f;

        minor = ctx->opcode & 0x3f;

            minor = (ctx->opcode >> 6) & 0xf;
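    /*
     * POOL32A: register-register ALU group.  The minor opcode selects among
     * the shift-immediate, three-register arithmetic/logic, and the R6
     * multiply/divide and select forms decoded below.
     */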
                mips32_op = OPC_SLL;
                mips32_op = OPC_SRA;
                mips32_op = OPC_SRL;
                mips32_op = OPC_ROTR;
                gen_shift_imm(ctx, mips32_op, rt, rs, rd);
                check_insn(ctx, ISA_MIPS32R6);
                gen_cond_move(ctx, OPC_SELEQZ, rd, rs, rt);
                check_insn(ctx, ISA_MIPS32R6);
                gen_cond_move(ctx, OPC_SELNEZ, rd, rs, rt);
                check_insn(ctx, ISA_MIPS32R6);
                gen_rdhwr(ctx, rt, rs, extract32(ctx->opcode, 11, 3));
                goto pool32a_invalid;
            minor = (ctx->opcode >> 6) & 0xf;
                mips32_op = OPC_ADD;
                mips32_op = OPC_ADDU;
                mips32_op = OPC_SUB;
                mips32_op = OPC_SUBU;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_MUL;
                gen_arith(ctx, mips32_op, rd, rs, rt);
                mips32_op = OPC_SLLV;
                mips32_op = OPC_SRLV;
                mips32_op = OPC_SRAV;
                mips32_op = OPC_ROTRV;
                gen_shift(ctx, mips32_op, rd, rs, rt);
            /* Logical operations */
                mips32_op = OPC_AND;
                mips32_op = OPC_OR;
                mips32_op = OPC_NOR;
                mips32_op = OPC_XOR;
                gen_logic(ctx, mips32_op, rd, rs, rt);
            /* Set less than */
                mips32_op = OPC_SLT;
                mips32_op = OPC_SLTU;
                gen_slt(ctx, mips32_op, rd, rs, rt);
                goto pool32a_invalid;
            minor = (ctx->opcode >> 6) & 0xf;
            /* Conditional moves */
            case MOVN: /* MUL */
                if (ctx->insn_flags & ISA_MIPS32R6) {
                    gen_r6_muldiv(ctx, R6_OPC_MUL, rd, rs, rt);
                    gen_cond_move(ctx, OPC_MOVN, rd, rs, rt);
            case MOVZ: /* MUH */
                if (ctx->insn_flags & ISA_MIPS32R6) {
                    gen_r6_muldiv(ctx, R6_OPC_MUH, rd, rs, rt);
                    gen_cond_move(ctx, OPC_MOVZ, rd, rs, rt);
                check_insn(ctx, ISA_MIPS32R6);
                gen_r6_muldiv(ctx, R6_OPC_MULU, rd, rs, rt);
                check_insn(ctx, ISA_MIPS32R6);
                gen_r6_muldiv(ctx, R6_OPC_MUHU, rd, rs, rt);
            case LWXS: /* DIV */
                if (ctx->insn_flags & ISA_MIPS32R6) {
                    gen_r6_muldiv(ctx, R6_OPC_DIV, rd, rs, rt);
                    gen_ldxs(ctx, rs, rt, rd);
                check_insn(ctx, ISA_MIPS32R6);
                gen_r6_muldiv(ctx, R6_OPC_MOD, rd, rs, rt);
                check_insn(ctx, ISA_MIPS32R6);
                gen_r6_muldiv(ctx, R6_OPC_DIVU, rd, rs, rt);
                check_insn(ctx, ISA_MIPS32R6);
                gen_r6_muldiv(ctx, R6_OPC_MODU, rd, rs, rt);
                goto pool32a_invalid;
            gen_bitops(ctx, OPC_INS, rt, rs, rr, rd);
            check_insn(ctx, ISA_MIPS32R6);
            gen_lsa(ctx, OPC_LSA, rd, rs, rt, extract32(ctx->opcode, 9, 2));
            check_insn(ctx, ISA_MIPS32R6);
            gen_align(ctx, OPC_ALIGN, rd, rs, rt, extract32(ctx->opcode, 9, 2));
            gen_bitops(ctx, OPC_EXT, rt, rs, rr, rd);
            gen_pool32axf(env, ctx, rt, rs);
            generate_exception_end(ctx, EXCP_BREAK);
            check_insn(ctx, ISA_MIPS32R6);
            generate_exception_end(ctx, EXCP_RI);
            MIPS_INVAL("pool32a");
            generate_exception_end(ctx, EXCP_RI);
        minor = (ctx->opcode >> 12) & 0xf;
            check_cp0_enabled(ctx);
            if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
                gen_cache_operation(ctx, rt, rs, imm);
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
#ifdef TARGET_MIPS64
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_ldst_pair(ctx, minor, rt, rs, SIMM(ctx->opcode, 0, 12));
#ifdef TARGET_MIPS64
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_ldst_multiple(ctx, minor, rt, rs, SIMM(ctx->opcode, 0, 12));
            MIPS_INVAL("pool32b");
            generate_exception_end(ctx, EXCP_RI);
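    /*
     * POOL32F: the floating-point pool.  It is only handled when
     * CP0.Config1.FP indicates an FPU is present and coprocessor 1 is
     * enabled; otherwise a coprocessor-unusable exception is raised below.
     */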
        if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
            minor = ctx->opcode & 0x3f;
            check_cp1_enabled(ctx);
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_ALNV_PS;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_MADD_S;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_MADD_D;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_MADD_PS;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_MSUB_S;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_MSUB_D;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_MSUB_PS;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_NMADD_S;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_NMADD_D;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_NMADD_PS;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_NMSUB_S;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_NMSUB_D;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                mips32_op = OPC_NMSUB_PS;
                gen_flt3_arith(ctx, mips32_op, rd, rr, rs, rt);
            case CABS_COND_FMT:
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                cond = (ctx->opcode >> 6) & 0xf;
                cc = (ctx->opcode >> 13) & 0x7;
                fmt = (ctx->opcode >> 10) & 0x3;
                    gen_cmpabs_s(ctx, cond, rt, rs, cc);
                    gen_cmpabs_d(ctx, cond, rt, rs, cc);
                    gen_cmpabs_ps(ctx, cond, rt, rs, cc);
                    goto pool32f_invalid;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                cond = (ctx->opcode >> 6) & 0xf;
                cc = (ctx->opcode >> 13) & 0x7;
                fmt = (ctx->opcode >> 10) & 0x3;
                    gen_cmp_s(ctx, cond, rt, rs, cc);
                    gen_cmp_d(ctx, cond, rt, rs, cc);
                    gen_cmp_ps(ctx, cond, rt, rs, cc);
                    goto pool32f_invalid;
                check_insn(ctx, ISA_MIPS32R6);
                gen_r6_cmp_s(ctx, (ctx->opcode >> 6) & 0x1f, rt, rs, rd);
                check_insn(ctx, ISA_MIPS32R6);
                gen_r6_cmp_d(ctx, (ctx->opcode >> 6) & 0x1f, rt, rs, rd);
                gen_pool32fxf(ctx, rt, rs);
                switch ((ctx->opcode >> 6) & 0x7) {
                    mips32_op = OPC_PLL_PS;
                    mips32_op = OPC_PLU_PS;
                    mips32_op = OPC_PUL_PS;
                    mips32_op = OPC_PUU_PS;
                    check_insn_opc_removed(ctx, ISA_MIPS32R6);
                    mips32_op = OPC_CVT_PS_S;
                    gen_farith(ctx, mips32_op, rt, rs, rd, 0);
                    goto pool32f_invalid;
                check_insn(ctx, ISA_MIPS32R6);
                switch ((ctx->opcode >> 9) & 0x3) {
                    gen_farith(ctx, OPC_MIN_S, rt, rs, rd, 0);
                    gen_farith(ctx, OPC_MIN_D, rt, rs, rd, 0);
                    goto pool32f_invalid;
                switch ((ctx->opcode >> 6) & 0x7) {
                    check_insn_opc_removed(ctx, ISA_MIPS32R6);
                    mips32_op = OPC_LWXC1;
                    check_insn_opc_removed(ctx, ISA_MIPS32R6);
                    mips32_op = OPC_SWXC1;
                    check_insn_opc_removed(ctx, ISA_MIPS32R6);
                    mips32_op = OPC_LDXC1;
                    check_insn_opc_removed(ctx, ISA_MIPS32R6);
                    mips32_op = OPC_SDXC1;
                    check_insn_opc_removed(ctx, ISA_MIPS32R6);
                    mips32_op = OPC_LUXC1;
                    check_insn_opc_removed(ctx, ISA_MIPS32R6);
                    mips32_op = OPC_SUXC1;
                    gen_flt3_ldst(ctx, mips32_op, rd, rd, rt, rs);
                    goto pool32f_invalid;
                check_insn(ctx, ISA_MIPS32R6);
                switch ((ctx->opcode >> 9) & 0x3) {
                    gen_farith(ctx, OPC_MAX_S, rt, rs, rd, 0);
                    gen_farith(ctx, OPC_MAX_D, rt, rs, rd, 0);
                    goto pool32f_invalid;
                check_insn_opc_removed(ctx, ISA_MIPS32R6);
                fmt = (ctx->opcode >> 9) & 0x3;
                switch ((ctx->opcode >> 6) & 0x7) {
                        mips32_op = OPC_RSQRT2_S;
                        mips32_op = OPC_RSQRT2_D;
                        mips32_op = OPC_RSQRT2_PS;
                        goto pool32f_invalid;
                        mips32_op = OPC_RECIP2_S;
                        mips32_op = OPC_RECIP2_D;
                        mips32_op = OPC_RECIP2_PS;
                        goto pool32f_invalid;
                    mips32_op = OPC_ADDR_PS;
                    mips32_op = OPC_MULR_PS;
                    gen_farith(ctx, mips32_op, rt, rs, rd, 0);
                    goto pool32f_invalid;
            /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
                cc = (ctx->opcode >> 13) & 0x7;
                fmt = (ctx->opcode >> 9) & 0x3;
                switch ((ctx->opcode >> 6) & 0x7) {
                case MOVF_FMT: /* RINT_FMT */
                    if (ctx->insn_flags & ISA_MIPS32R6) {
                            gen_farith(ctx, OPC_RINT_S, 0, rt, rs, 0);
                            gen_farith(ctx, OPC_RINT_D, 0, rt, rs, 0);
                            goto pool32f_invalid;
                            gen_movcf_s(ctx, rs, rt, cc, 0);
                            gen_movcf_d(ctx, rs, rt, cc, 0);
                            gen_movcf_ps(ctx, rs, rt, cc, 0);
                            goto pool32f_invalid;
                case MOVT_FMT: /* CLASS_FMT */
                    if (ctx->insn_flags & ISA_MIPS32R6) {
                            gen_farith(ctx, OPC_CLASS_S, 0, rt, rs, 0);
                            gen_farith(ctx, OPC_CLASS_D, 0, rt, rs, 0);
                            goto pool32f_invalid;
                            gen_movcf_s(ctx, rs, rt, cc, 1);
                            gen_movcf_d(ctx, rs, rt, cc, 1);
                            gen_movcf_ps(ctx, rs, rt, cc, 1);
                            goto pool32f_invalid;
                    check_insn_opc_removed(ctx, ISA_MIPS32R6);
                    goto pool32f_invalid;
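/*
 * FINSN_3ARG_SDPS(prfx) picks the OPC_<prfx>_S, _D or _PS variant of a
 * three-operand FP operation from the two-bit format field at opcode
 * bits 9:8, branching to pool32f_invalid for a reserved format.
 */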
#define FINSN_3ARG_SDPS(prfx) \
    switch ((ctx->opcode >> 8) & 0x3) { \
        mips32_op = OPC_##prfx##_S; \
        mips32_op = OPC_##prfx##_D; \
    case FMT_SDPS_PS: \
        mips32_op = OPC_##prfx##_PS; \
        goto pool32f_invalid; \
                check_insn(ctx, ISA_MIPS32R6);
                switch ((ctx->opcode >> 9) & 0x3) {
                    gen_farith(ctx, OPC_MINA_S, rt, rs, rd, 0);
                    gen_farith(ctx, OPC_MINA_D, rt, rs, rd, 0);
                    goto pool32f_invalid;
                check_insn(ctx, ISA_MIPS32R6);
                switch ((ctx->opcode >> 9) & 0x3) {
                    gen_farith(ctx, OPC_MAXA_S, rt, rs, rd, 0);
                    gen_farith(ctx, OPC_MAXA_D, rt, rs, rd, 0);
                    goto pool32f_invalid;
                /* regular FP ops */
                switch ((ctx->opcode >> 6) & 0x3) {
                    FINSN_3ARG_SDPS(ADD);
                    FINSN_3ARG_SDPS(SUB);
                    FINSN_3ARG_SDPS(MUL);
                    fmt = (ctx->opcode >> 8) & 0x3;
                        mips32_op = OPC_DIV_D;
                    } else if (fmt == 0) {
                        mips32_op = OPC_DIV_S;
                        goto pool32f_invalid;
                    goto pool32f_invalid;
                switch ((ctx->opcode >> 6) & 0x7) {
                case MOVN_FMT: /* SELNEZ_FMT */
                    if (ctx->insn_flags & ISA_MIPS32R6) {
                        switch ((ctx->opcode >> 9) & 0x3) {
                            gen_sel_s(ctx, OPC_SELNEZ_S, rd, rt, rs);
                            gen_sel_d(ctx, OPC_SELNEZ_D, rd, rt, rs);
                            goto pool32f_invalid;
                        FINSN_3ARG_SDPS(MOVN);
                    check_insn_opc_removed(ctx, ISA_MIPS32R6);
                    FINSN_3ARG_SDPS(MOVN);
                case MOVZ_FMT: /* SELEQZ_FMT */
                    if (ctx->insn_flags & ISA_MIPS32R6) {
                        switch ((ctx->opcode >> 9) & 0x3) {
                            gen_sel_s(ctx, OPC_SELEQZ_S, rd, rt, rs);
                            gen_sel_d(ctx, OPC_SELEQZ_D, rd, rt, rs);
                            goto pool32f_invalid;
                        FINSN_3ARG_SDPS(MOVZ);
                    check_insn_opc_removed(ctx, ISA_MIPS32R6);
                    FINSN_3ARG_SDPS(MOVZ);
                    check_insn(ctx, ISA_MIPS32R6);
                    switch ((ctx->opcode >> 9) & 0x3) {
                        gen_sel_s(ctx, OPC_SEL_S, rd, rt, rs);
                        gen_sel_d(ctx, OPC_SEL_D, rd, rt, rs);
                        goto pool32f_invalid;
                    check_insn(ctx, ISA_MIPS32R6);
                    switch ((ctx->opcode >> 9) & 0x3) {
                        mips32_op = OPC_MADDF_S;
                        mips32_op = OPC_MADDF_D;
                        goto pool32f_invalid;
                    check_insn(ctx, ISA_MIPS32R6);
                    switch ((ctx->opcode >> 9) & 0x3) {
                        mips32_op = OPC_MSUBF_S;
                        mips32_op = OPC_MSUBF_D;
                        goto pool32f_invalid;
                    goto pool32f_invalid;
                gen_farith(ctx, mips32_op, rt, rs, rd, 0);
                MIPS_INVAL("pool32f");
                generate_exception_end(ctx, EXCP_RI);
            generate_exception_err(ctx, EXCP_CpU, 1);
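    /*
     * POOL32I: branch, trap and coprocessor-1 condition-code branch
     * encodings, selected by the minor field at opcode bits 25:21.
     * Several of these encodings were removed or repurposed in Release 6.
     */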
14456 minor
= (ctx
->opcode
>> 21) & 0x1f;
14459 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14460 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
14463 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14464 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
14465 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14468 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14469 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
14470 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14473 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14474 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
14477 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14478 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
14479 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14482 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14483 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
14484 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14487 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14488 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
14491 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14492 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
14496 case TLTI
: /* BC1EQZC */
14497 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14499 check_cp1_enabled(ctx
);
14500 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
14503 mips32_op
= OPC_TLTI
;
14507 case TGEI
: /* BC1NEZC */
14508 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14510 check_cp1_enabled(ctx
);
14511 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
14514 mips32_op
= OPC_TGEI
;
14519 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14520 mips32_op
= OPC_TLTIU
;
14523 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14524 mips32_op
= OPC_TGEIU
;
14526 case TNEI
: /* SYNCI */
14527 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14529 /* Break the TB to be able to sync copied instructions
14531 ctx
->bstate
= BS_STOP
;
14534 mips32_op
= OPC_TNEI
;
14539 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14540 mips32_op
= OPC_TEQI
;
14542 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
14547 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14548 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
14549 4, rs
, 0, imm
<< 1, 0);
14550 /* Compact branches don't have a delay slot, so just let
14551 the normal delay slot handling take us to the branch
14555 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14556 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
14559 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14560 /* Break the TB to be able to sync copied instructions
14562 ctx
->bstate
= BS_STOP
;
14566 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14567 /* COP2: Not implemented. */
14568 generate_exception_err(ctx
, EXCP_CpU
, 2);
14571 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14572 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
14575 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14576 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
14579 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14580 mips32_op
= OPC_BC1FANY4
;
14583 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14584 mips32_op
= OPC_BC1TANY4
;
14587 check_insn(ctx
, ASE_MIPS3D
);
14590 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
14591 check_cp1_enabled(ctx
);
14592 gen_compute_branch1(ctx
, mips32_op
,
14593 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
14595 generate_exception_err(ctx
, EXCP_CpU
, 1);
14600 /* MIPS DSP: not implemented */
14603 MIPS_INVAL("pool32i");
14604 generate_exception_end(ctx
, EXCP_RI
);
14609 minor
= (ctx
->opcode
>> 12) & 0xf;
14610 offset
= sextract32(ctx
->opcode
, 0,
14611 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 9 : 12);
14614 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14615 mips32_op
= OPC_LWL
;
14618 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14619 mips32_op
= OPC_SWL
;
14622 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14623 mips32_op
= OPC_LWR
;
14626 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14627 mips32_op
= OPC_SWR
;
14629 #if defined(TARGET_MIPS64)
14631 check_insn(ctx
, ISA_MIPS3
);
14632 check_mips_64(ctx
);
14633 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14634 mips32_op
= OPC_LDL
;
14637 check_insn(ctx
, ISA_MIPS3
);
14638 check_mips_64(ctx
);
14639 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14640 mips32_op
= OPC_SDL
;
14643 check_insn(ctx
, ISA_MIPS3
);
14644 check_mips_64(ctx
);
14645 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14646 mips32_op
= OPC_LDR
;
14649 check_insn(ctx
, ISA_MIPS3
);
14650 check_mips_64(ctx
);
14651 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14652 mips32_op
= OPC_SDR
;
14655 check_insn(ctx
, ISA_MIPS3
);
14656 check_mips_64(ctx
);
14657 mips32_op
= OPC_LWU
;
14660 check_insn(ctx
, ISA_MIPS3
);
14661 check_mips_64(ctx
);
14662 mips32_op
= OPC_LLD
;
14666 mips32_op
= OPC_LL
;
14669 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
14672 gen_st(ctx
, mips32_op
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
14675 gen_st_cond(ctx
, OPC_SC
, rt
, rs
, offset
);
14677 #if defined(TARGET_MIPS64)
14679 check_insn(ctx
, ISA_MIPS3
);
14680 check_mips_64(ctx
);
14681 gen_st_cond(ctx
, OPC_SCD
, rt
, rs
, offset
);
14685 /* Treat as no-op */
14686 if ((ctx
->insn_flags
& ISA_MIPS32R6
) && (rt
>= 24)) {
14687 /* hint codes 24-31 are reserved and signal RI */
14688 generate_exception(ctx
, EXCP_RI
);
14692 MIPS_INVAL("pool32c");
14693 generate_exception_end(ctx
, EXCP_RI
);
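    /*
     * The remaining major opcodes are decoded directly: immediate
     * arithmetic/logic, branches and jumps (several encodings are reused for
     * compact branches on Release 6), and the load/store forms.
     */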
14697 case ADDI32
: /* AUI, LUI */
14698 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14700 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
14703 mips32_op
= OPC_ADDI
;
14708 mips32_op
= OPC_ADDIU
;
14710 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14713 /* Logical operations */
14715 mips32_op
= OPC_ORI
;
14718 mips32_op
= OPC_XORI
;
14721 mips32_op
= OPC_ANDI
;
14723 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14726 /* Set less than immediate */
14728 mips32_op
= OPC_SLTI
;
14731 mips32_op
= OPC_SLTIU
;
14733 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
14736 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
14737 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
14738 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
14739 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14741 case JALS32
: /* BOVC, BEQC, BEQZALC */
14742 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14745 mips32_op
= OPC_BOVC
;
14746 } else if (rs
< rt
&& rs
== 0) {
14748 mips32_op
= OPC_BEQZALC
;
14751 mips32_op
= OPC_BEQC
;
14753 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14756 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
14757 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
14758 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14761 case BEQ32
: /* BC */
14762 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14764 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
14765 sextract32(ctx
->opcode
<< 1, 0, 27));
14768 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
14771 case BNE32
: /* BALC */
14772 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14774 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
14775 sextract32(ctx
->opcode
<< 1, 0, 27));
14778 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
14781 case J32
: /* BGTZC, BLTZC, BLTC */
14782 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14783 if (rs
== 0 && rt
!= 0) {
14785 mips32_op
= OPC_BGTZC
;
14786 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14788 mips32_op
= OPC_BLTZC
;
14791 mips32_op
= OPC_BLTC
;
14793 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14796 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
14797 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14800 case JAL32
: /* BLEZC, BGEZC, BGEC */
14801 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14802 if (rs
== 0 && rt
!= 0) {
14804 mips32_op
= OPC_BLEZC
;
14805 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14807 mips32_op
= OPC_BGEZC
;
14810 mips32_op
= OPC_BGEC
;
14812 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14815 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
14816 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
14817 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
14820 /* Floating point (COP1) */
14822 mips32_op
= OPC_LWC1
;
14825 mips32_op
= OPC_LDC1
;
14828 mips32_op
= OPC_SWC1
;
14831 mips32_op
= OPC_SDC1
;
14833 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
14835 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14836 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
14837 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
14838 switch ((ctx
->opcode
>> 16) & 0x1f) {
14839 case ADDIUPC_00
... ADDIUPC_07
:
14840 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->pc
& ~0x3, rt
);
14843 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->pc
, rt
);
14846 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->pc
, rt
);
14848 case LWPC_08
... LWPC_0F
:
14849 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->pc
& ~0x3, rt
);
14852 generate_exception(ctx
, EXCP_RI
);
14857 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
14858 int offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
14860 gen_addiupc(ctx
, reg
, offset
, 0, 0);
14863 case BNVC
: /* BNEC, BNEZALC */
14864 check_insn(ctx
, ISA_MIPS32R6
);
14867 mips32_op
= OPC_BNVC
;
14868 } else if (rs
< rt
&& rs
== 0) {
14870 mips32_op
= OPC_BNEZALC
;
14873 mips32_op
= OPC_BNEC
;
14875 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14877 case R6_BNEZC
: /* JIALC */
14878 check_insn(ctx
, ISA_MIPS32R6
);
14881 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
14882 sextract32(ctx
->opcode
<< 1, 0, 22));
14885 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
14888 case R6_BEQZC
: /* JIC */
14889 check_insn(ctx
, ISA_MIPS32R6
);
14892 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
14893 sextract32(ctx
->opcode
<< 1, 0, 22));
14896 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
14899 case BLEZALC
: /* BGEZALC, BGEUC */
14900 check_insn(ctx
, ISA_MIPS32R6
);
14901 if (rs
== 0 && rt
!= 0) {
14903 mips32_op
= OPC_BLEZALC
;
14904 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14906 mips32_op
= OPC_BGEZALC
;
14909 mips32_op
= OPC_BGEUC
;
14911 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14913 case BGTZALC
: /* BLTZALC, BLTUC */
14914 check_insn(ctx
, ISA_MIPS32R6
);
14915 if (rs
== 0 && rt
!= 0) {
14917 mips32_op
= OPC_BGTZALC
;
14918 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
14920 mips32_op
= OPC_BLTZALC
;
14923 mips32_op
= OPC_BLTUC
;
14925 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
14927 /* Loads and stores */
14929 mips32_op
= OPC_LB
;
14932 mips32_op
= OPC_LBU
;
14935 mips32_op
= OPC_LH
;
14938 mips32_op
= OPC_LHU
;
14941 mips32_op
= OPC_LW
;
14943 #ifdef TARGET_MIPS64
14945 check_insn(ctx
, ISA_MIPS3
);
14946 check_mips_64(ctx
);
14947 mips32_op
= OPC_LD
;
14950 check_insn(ctx
, ISA_MIPS3
);
14951 check_mips_64(ctx
);
14952 mips32_op
= OPC_SD
;
14956 mips32_op
= OPC_SB
;
14959 mips32_op
= OPC_SH
;
14962 mips32_op
= OPC_SW
;
14965 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
14968 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
14971 generate_exception_end(ctx
, EXCP_RI
);
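/*
 * Top-level microMIPS decoder: it checks that the PC is halfword aligned,
 * enforces the expected instruction size in a delay slot, decodes the 16-bit
 * formats here and defers the 32-bit ones to decode_micromips32_opc().
 */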
static int decode_micromips_opc (CPUMIPSState *env, DisasContext *ctx)
{
    /* make sure instructions are on a halfword boundary */
    if (ctx->pc & 0x1) {
        env->CP0_BadVAddr = ctx->pc;
        generate_exception_end(ctx, EXCP_AdEL);

    op = (ctx->opcode >> 10) & 0x3f;
    /* Enforce properly-sized instructions in a delay slot */
    if (ctx->hflags & MIPS_HFLAG_BDS_STRICT) {
        switch (op & 0x7) { /* MSB-3..MSB-5 */
        /* POOL32A, POOL32B, POOL32I, POOL32C */
        /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
        /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
        /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
        /* LB32, LH32, LWC132, LDC132, LW32 */
            if (ctx->hflags & MIPS_HFLAG_BDS16) {
                generate_exception_end(ctx, EXCP_RI);
        /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
        /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
        /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
            if (ctx->hflags & MIPS_HFLAG_BDS32) {
                generate_exception_end(ctx, EXCP_RI);
15023 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15024 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
15025 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
15028 switch (ctx
->opcode
& 0x1) {
15036 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15037 /* In the Release 6 the register number location in
15038 * the instruction encoding has changed.
15040 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
15042 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
15048 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15049 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
15050 int amount
= (ctx
->opcode
>> 1) & 0x7;
15052 amount
= amount
== 0 ? 8 : amount
;
15054 switch (ctx
->opcode
& 0x1) {
15063 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
15067 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
15068 gen_pool16c_r6_insn(ctx
);
15070 gen_pool16c_insn(ctx
);
15075 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15076 int rb
= 28; /* GP */
15077 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
15079 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15083 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
15084 if (ctx
->opcode
& 1) {
15085 generate_exception_end(ctx
, EXCP_RI
);
15088 int enc_dest
= uMIPS_RD(ctx
->opcode
);
15089 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
15090 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
15091 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
15096 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15097 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15098 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15099 offset
= (offset
== 0xf ? -1 : offset
);
15101 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
15106 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15107 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15108 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15110 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
15115 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15116 int rb
= 29; /* SP */
15117 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15119 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15124 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15125 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15126 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15128 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
15133 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15134 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15135 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
15137 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
15142 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15143 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15144 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
15146 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
15151 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15152 int rb
= 29; /* SP */
15153 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
15155 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15160 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
15161 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
15162 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
15164 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
15169 int rd
= uMIPS_RD5(ctx
->opcode
);
15170 int rs
= uMIPS_RS5(ctx
->opcode
);
15172 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
15179 switch (ctx
->opcode
& 0x1) {
15189 switch (ctx
->opcode
& 0x1) {
15194 gen_addiur1sp(ctx
);
15198 case B16
: /* BC16 */
15199 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
15200 sextract32(ctx
->opcode
, 0, 10) << 1,
15201 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15203 case BNEZ16
: /* BNEZC16 */
15204 case BEQZ16
: /* BEQZC16 */
15205 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
15206 mmreg(uMIPS_RD(ctx
->opcode
)),
15207 0, sextract32(ctx
->opcode
, 0, 7) << 1,
15208 (ctx
->insn_flags
& ISA_MIPS32R6
) ? 0 : 4);
15213 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
15214 int imm
= ZIMM(ctx
->opcode
, 0, 7);
15216 imm
= (imm
== 0x7f ? -1 : imm
);
15217 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
15223 generate_exception_end(ctx
, EXCP_RI
);
15226 decode_micromips32_opc(env
, ctx
);
/* SmartMIPS extension to MIPS32 */

#if defined(TARGET_MIPS64)
/* MDMX extension to MIPS64 */

/* MIPSDSP functions. */
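/*
 * Common pattern for the DSP ASE translation routines below: source GPRs are
 * copied into TCG temporaries with gen_load_gpr(), most operations are
 * emitted as calls to out-of-line helpers (gen_helper_*), and results are
 * written back through cpu_gpr[].  Writes that would target register 0 are
 * treated as NOPs, and the temporaries are freed before returning.
 */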
static void gen_mipsdsp_ld(DisasContext *ctx, uint32_t opc,
                           int rd, int base, int offset)
{
    t0 = tcg_temp_new();

        gen_load_gpr(t0, offset);
    } else if (offset == 0) {
        gen_load_gpr(t0, base);
        gen_op_addr_add(ctx, t0, cpu_gpr[base], cpu_gpr[offset]);

        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_UB);
        gen_store_gpr(t0, rd);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESW);
        gen_store_gpr(t0, rd);
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TESL);
        gen_store_gpr(t0, rd);
#if defined(TARGET_MIPS64)
        tcg_gen_qemu_ld_tl(t0, t0, ctx->mem_idx, MO_TEQ);
        gen_store_gpr(t0, rd);
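/*
 * The effective address is computed into t0 (base register, index register,
 * or their sum via gen_op_addr_add()), then a single guest load is emitted
 * with tcg_gen_qemu_ld_tl(); the MemOp argument (MO_UB, MO_TESW, MO_TESL,
 * MO_TEQ) selects the byte, halfword, word or doubleword form, and the
 * result is written back with gen_store_gpr().
 */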
15281 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15282 int ret
, int v1
, int v2
)
15288 /* Treat as NOP. */
15292 v1_t
= tcg_temp_new();
15293 v2_t
= tcg_temp_new();
15295 gen_load_gpr(v1_t
, v1
);
15296 gen_load_gpr(v2_t
, v2
);
15299 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
15300 case OPC_MULT_G_2E
:
15304 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15306 case OPC_ADDUH_R_QB
:
15307 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15310 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15312 case OPC_ADDQH_R_PH
:
15313 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15316 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15318 case OPC_ADDQH_R_W
:
15319 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15322 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15324 case OPC_SUBUH_R_QB
:
15325 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15328 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15330 case OPC_SUBQH_R_PH
:
15331 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15334 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15336 case OPC_SUBQH_R_W
:
15337 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15341 case OPC_ABSQ_S_PH_DSP
:
15343 case OPC_ABSQ_S_QB
:
15345 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
15347 case OPC_ABSQ_S_PH
:
15349 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
15353 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
15355 case OPC_PRECEQ_W_PHL
:
15357 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
15358 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15360 case OPC_PRECEQ_W_PHR
:
15362 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
15363 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
15364 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
15366 case OPC_PRECEQU_PH_QBL
:
15368 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
15370 case OPC_PRECEQU_PH_QBR
:
15372 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
15374 case OPC_PRECEQU_PH_QBLA
:
15376 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
15378 case OPC_PRECEQU_PH_QBRA
:
15380 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
15382 case OPC_PRECEU_PH_QBL
:
15384 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
15386 case OPC_PRECEU_PH_QBR
:
15388 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
15390 case OPC_PRECEU_PH_QBLA
:
15392 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
15394 case OPC_PRECEU_PH_QBRA
:
15396 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
15400 case OPC_ADDU_QB_DSP
:
15404 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15406 case OPC_ADDQ_S_PH
:
15408 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15412 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15416 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15418 case OPC_ADDU_S_QB
:
15420 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15424 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15426 case OPC_ADDU_S_PH
:
15428 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15432 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15434 case OPC_SUBQ_S_PH
:
15436 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15440 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15444 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15446 case OPC_SUBU_S_QB
:
15448 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15452 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15454 case OPC_SUBU_S_PH
:
15456 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15460 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15464 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15468 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
15470 case OPC_RADDU_W_QB
:
15472 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
15476 case OPC_CMPU_EQ_QB_DSP
:
15478 case OPC_PRECR_QB_PH
:
15480 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15482 case OPC_PRECRQ_QB_PH
:
15484 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15486 case OPC_PRECR_SRA_PH_W
:
15489 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15490 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15492 tcg_temp_free_i32(sa_t
);
15495 case OPC_PRECR_SRA_R_PH_W
:
15498 TCGv_i32 sa_t
= tcg_const_i32(v2
);
15499 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
15501 tcg_temp_free_i32(sa_t
);
15504 case OPC_PRECRQ_PH_W
:
15506 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15508 case OPC_PRECRQ_RS_PH_W
:
15510 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15512 case OPC_PRECRQU_S_QB_PH
:
15514 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15518 #ifdef TARGET_MIPS64
15519 case OPC_ABSQ_S_QH_DSP
:
15521 case OPC_PRECEQ_L_PWL
:
15523 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
15525 case OPC_PRECEQ_L_PWR
:
15527 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
15529 case OPC_PRECEQ_PW_QHL
:
15531 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
15533 case OPC_PRECEQ_PW_QHR
:
15535 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
15537 case OPC_PRECEQ_PW_QHLA
:
15539 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
15541 case OPC_PRECEQ_PW_QHRA
:
15543 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
15545 case OPC_PRECEQU_QH_OBL
:
15547 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
15549 case OPC_PRECEQU_QH_OBR
:
15551 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
15553 case OPC_PRECEQU_QH_OBLA
:
15555 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
15557 case OPC_PRECEQU_QH_OBRA
:
15559 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
15561 case OPC_PRECEU_QH_OBL
:
15563 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
15565 case OPC_PRECEU_QH_OBR
:
15567 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
15569 case OPC_PRECEU_QH_OBLA
:
15571 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
15573 case OPC_PRECEU_QH_OBRA
:
15575 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
15577 case OPC_ABSQ_S_OB
:
15579 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
15581 case OPC_ABSQ_S_PW
:
15583 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
15585 case OPC_ABSQ_S_QH
:
15587 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
15591 case OPC_ADDU_OB_DSP
:
15593 case OPC_RADDU_L_OB
:
15595 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
15599 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15601 case OPC_SUBQ_S_PW
:
15603 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15607 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15609 case OPC_SUBQ_S_QH
:
15611 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15615 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15617 case OPC_SUBU_S_OB
:
15619 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15623 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15625 case OPC_SUBU_S_QH
:
15627 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15631 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15633 case OPC_SUBUH_R_OB
:
15635 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15639 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15641 case OPC_ADDQ_S_PW
:
15643 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15647 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15649 case OPC_ADDQ_S_QH
:
15651 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15655 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15657 case OPC_ADDU_S_OB
:
15659 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15663 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15665 case OPC_ADDU_S_QH
:
15667 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15671 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15673 case OPC_ADDUH_R_OB
:
15675 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
15679 case OPC_CMPU_EQ_OB_DSP
:
15681 case OPC_PRECR_OB_QH
:
15683 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15685 case OPC_PRECR_SRA_QH_PW
:
15688 TCGv_i32 ret_t
= tcg_const_i32(ret
);
15689 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
15690 tcg_temp_free_i32(ret_t
);
15693 case OPC_PRECR_SRA_R_QH_PW
:
15696 TCGv_i32 sa_v
= tcg_const_i32(ret
);
15697 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
15698 tcg_temp_free_i32(sa_v
);
15701 case OPC_PRECRQ_OB_QH
:
15703 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
15705 case OPC_PRECRQ_PW_L
:
15707 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
15709 case OPC_PRECRQ_QH_PW
:
15711 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
15713 case OPC_PRECRQ_RS_QH_PW
:
15715 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15717 case OPC_PRECRQU_S_OB_QH
:
15719 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15726 tcg_temp_free(v1_t
);
15727 tcg_temp_free(v2_t
);
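/*
 * gen_mipsdsp_shift(): the immediate shift forms pass the encoded shift
 * amount through a temporary initialised with tcg_gen_movi_tl(t0, v1),
 * while the variable (register) forms pass the value loaded into v1_t;
 * both then call the corresponding DSP shift helper.
 */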
15730 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
15731 int ret
, int v1
, int v2
)
15739 /* Treat as NOP. */
15743 t0
= tcg_temp_new();
15744 v1_t
= tcg_temp_new();
15745 v2_t
= tcg_temp_new();
15747 tcg_gen_movi_tl(t0
, v1
);
15748 gen_load_gpr(v1_t
, v1
);
15749 gen_load_gpr(v2_t
, v2
);
15752 case OPC_SHLL_QB_DSP
:
15754 op2
= MASK_SHLL_QB(ctx
->opcode
);
15758 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15762 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15766 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15770 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15772 case OPC_SHLL_S_PH
:
15774 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15776 case OPC_SHLLV_S_PH
:
15778 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15782 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
15784 case OPC_SHLLV_S_W
:
15786 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
15790 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
15794 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15798 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
15802 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15806 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
15808 case OPC_SHRA_R_QB
:
15810 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
15814 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15816 case OPC_SHRAV_R_QB
:
15818 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
15822 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
15824 case OPC_SHRA_R_PH
:
15826 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
15830 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15832 case OPC_SHRAV_R_PH
:
15834 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
15838 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
15840 case OPC_SHRAV_R_W
:
15842 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
15844 default: /* Invalid */
15845 MIPS_INVAL("MASK SHLL.QB");
15846 generate_exception_end(ctx
, EXCP_RI
);
15851 #ifdef TARGET_MIPS64
15852 case OPC_SHLL_OB_DSP
:
15853 op2
= MASK_SHLL_OB(ctx
->opcode
);
15857 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15861 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15863 case OPC_SHLL_S_PW
:
15865 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15867 case OPC_SHLLV_S_PW
:
15869 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15873 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15877 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15881 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15885 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15887 case OPC_SHLL_S_QH
:
15889 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
15891 case OPC_SHLLV_S_QH
:
15893 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
15897 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
15901 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15903 case OPC_SHRA_R_OB
:
15905 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
15907 case OPC_SHRAV_R_OB
:
15909 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15913 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
15917 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15919 case OPC_SHRA_R_PW
:
15921 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
15923 case OPC_SHRAV_R_PW
:
15925 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
15929 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
15933 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15935 case OPC_SHRA_R_QH
:
15937 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
15939 case OPC_SHRAV_R_QH
:
15941 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15945 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
15949 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
15953 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
15957 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
15959 default: /* Invalid */
15960 MIPS_INVAL("MASK SHLL.OB");
15961 generate_exception_end(ctx
, EXCP_RI
);
15969 tcg_temp_free(v1_t
);
15970 tcg_temp_free(v2_t
);
15973 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
15974 int ret
, int v1
, int v2
, int check_ret
)
15980 if ((ret
== 0) && (check_ret
== 1)) {
15981 /* Treat as NOP. */
15985 t0
= tcg_temp_new_i32();
15986 v1_t
= tcg_temp_new();
15987 v2_t
= tcg_temp_new();
15989 tcg_gen_movi_i32(t0
, ret
);
15990 gen_load_gpr(v1_t
, v1
);
15991 gen_load_gpr(v2_t
, v2
);
15994 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
15995 * the same mask and op1. */
15996 case OPC_MULT_G_2E
:
16000 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16003 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16006 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16008 case OPC_MULQ_RS_W
:
16009 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16013 case OPC_DPA_W_PH_DSP
:
16015 case OPC_DPAU_H_QBL
:
16017 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
16019 case OPC_DPAU_H_QBR
:
16021 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
16023 case OPC_DPSU_H_QBL
:
16025 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
16027 case OPC_DPSU_H_QBR
:
16029 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
16033 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16035 case OPC_DPAX_W_PH
:
16037 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16039 case OPC_DPAQ_S_W_PH
:
16041 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16043 case OPC_DPAQX_S_W_PH
:
16045 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16047 case OPC_DPAQX_SA_W_PH
:
16049 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16053 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16055 case OPC_DPSX_W_PH
:
16057 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16059 case OPC_DPSQ_S_W_PH
:
16061 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16063 case OPC_DPSQX_S_W_PH
:
16065 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16067 case OPC_DPSQX_SA_W_PH
:
16069 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16071 case OPC_MULSAQ_S_W_PH
:
16073 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16075 case OPC_DPAQ_SA_L_W
:
16077 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
16079 case OPC_DPSQ_SA_L_W
:
16081 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
16083 case OPC_MAQ_S_W_PHL
:
16085 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
16087 case OPC_MAQ_S_W_PHR
:
16089 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16091 case OPC_MAQ_SA_W_PHL
:
16093 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
16095 case OPC_MAQ_SA_W_PHR
:
16097 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
16099 case OPC_MULSA_W_PH
:
16101 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
16105 #ifdef TARGET_MIPS64
16106 case OPC_DPAQ_W_QH_DSP
:
16108 int ac
= ret
& 0x03;
16109 tcg_gen_movi_i32(t0
, ac
);
16114 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
16118 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
16122 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
16126 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
16130 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16132 case OPC_DPAQ_S_W_QH
:
16134 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16136 case OPC_DPAQ_SA_L_PW
:
16138 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16140 case OPC_DPAU_H_OBL
:
16142 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16144 case OPC_DPAU_H_OBR
:
16146 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16150 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16152 case OPC_DPSQ_S_W_QH
:
16154 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16156 case OPC_DPSQ_SA_L_PW
:
16158 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16160 case OPC_DPSU_H_OBL
:
16162 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
16164 case OPC_DPSU_H_OBR
:
16166 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
16168 case OPC_MAQ_S_L_PWL
:
16170 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
16172 case OPC_MAQ_S_L_PWR
:
16174 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
16176 case OPC_MAQ_S_W_QHLL
:
16178 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16180 case OPC_MAQ_SA_W_QHLL
:
16182 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
16184 case OPC_MAQ_S_W_QHLR
:
16186 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16188 case OPC_MAQ_SA_W_QHLR
:
16190 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
16192 case OPC_MAQ_S_W_QHRL
:
16194 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16196 case OPC_MAQ_SA_W_QHRL
:
16198 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
16200 case OPC_MAQ_S_W_QHRR
:
16202 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16204 case OPC_MAQ_SA_W_QHRR
:
16206 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
16208 case OPC_MULSAQ_S_L_PW
:
16210 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
16212 case OPC_MULSAQ_S_W_QH
:
16214 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
16220 case OPC_ADDU_QB_DSP
:
16222 case OPC_MULEU_S_PH_QBL
:
16224 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16226 case OPC_MULEU_S_PH_QBR
:
16228 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16230 case OPC_MULQ_RS_PH
:
16232 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16234 case OPC_MULEQ_S_W_PHL
:
16236 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16238 case OPC_MULEQ_S_W_PHR
:
16240 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16242 case OPC_MULQ_S_PH
:
16244 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16248 #ifdef TARGET_MIPS64
16249 case OPC_ADDU_OB_DSP
:
16251 case OPC_MULEQ_S_PW_QHL
:
16253 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16255 case OPC_MULEQ_S_PW_QHR
:
16257 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16259 case OPC_MULEU_S_QH_OBL
:
16261 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16263 case OPC_MULEU_S_QH_OBR
:
16265 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16267 case OPC_MULQ_RS_QH
:
16269 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16276 tcg_temp_free_i32(t0
);
16277 tcg_temp_free(v1_t
);
16278 tcg_temp_free(v2_t
);
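/*
 * gen_mipsdsp_bitinsn(): bit and field operations.  The replicate-immediate
 * cases compute the splatted constant at translation time and load it with
 * tcg_gen_movi_tl(), the register-based cases build the replicated value
 * with shift/or sequences on the loaded source, and single-source ops such
 * as bit reversal call helpers (gen_helper_bitrev).
 */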
16281 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16289 /* Treat as NOP. */
16293 t0
= tcg_temp_new();
16294 val_t
= tcg_temp_new();
16295 gen_load_gpr(val_t
, val
);
16298 case OPC_ABSQ_S_PH_DSP
:
16302 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
16307 target_long result
;
16308 imm
= (ctx
->opcode
>> 16) & 0xFF;
16309 result
= (uint32_t)imm
<< 24 |
16310 (uint32_t)imm
<< 16 |
16311 (uint32_t)imm
<< 8 |
16313 result
= (int32_t)result
;
16314 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
16319 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16320 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16321 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16322 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16323 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16324 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16329 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16330 imm
= (int16_t)(imm
<< 6) >> 6;
16331 tcg_gen_movi_tl(cpu_gpr
[ret
], \
16332 (target_long
)((int32_t)imm
<< 16 | \
16338 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16339 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16340 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16341 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
16345 #ifdef TARGET_MIPS64
16346 case OPC_ABSQ_S_QH_DSP
:
16353 imm
= (ctx
->opcode
>> 16) & 0xFF;
16354 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
16355 temp
= (temp
<< 16) | temp
;
16356 temp
= (temp
<< 32) | temp
;
16357 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16365 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16366 imm
= (int16_t)(imm
<< 6) >> 6;
16367 temp
= ((target_long
)imm
<< 32) \
16368 | ((target_long
)imm
& 0xFFFFFFFF);
16369 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16377 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16378 imm
= (int16_t)(imm
<< 6) >> 6;
16380 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
16381 ((uint64_t)(uint16_t)imm
<< 32) |
16382 ((uint64_t)(uint16_t)imm
<< 16) |
16383 (uint64_t)(uint16_t)imm
;
16384 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
16389 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
16390 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
16391 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16392 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16393 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16394 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16395 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16399 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
16400 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16401 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16405 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
16406 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
16407 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16408 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
16409 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
16416 tcg_temp_free(val_t
);
16419 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
16420 uint32_t op1
, uint32_t op2
,
16421 int ret
, int v1
, int v2
, int check_ret
)
16427 if ((ret
== 0) && (check_ret
== 1)) {
16428 /* Treat as NOP. */
16432 t1
= tcg_temp_new();
16433 v1_t
= tcg_temp_new();
16434 v2_t
= tcg_temp_new();
16436 gen_load_gpr(v1_t
, v1
);
16437 gen_load_gpr(v2_t
, v2
);
16440 case OPC_CMPU_EQ_QB_DSP
:
16442 case OPC_CMPU_EQ_QB
:
16444 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
16446 case OPC_CMPU_LT_QB
:
16448 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
16450 case OPC_CMPU_LE_QB
:
16452 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
16454 case OPC_CMPGU_EQ_QB
:
16456 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16458 case OPC_CMPGU_LT_QB
:
16460 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16462 case OPC_CMPGU_LE_QB
:
16464 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
16466 case OPC_CMPGDU_EQ_QB
:
16468 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
16469 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16470 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16471 tcg_gen_shli_tl(t1
, t1
, 24);
16472 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16474 case OPC_CMPGDU_LT_QB
:
16476 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
16477 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16478 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16479 tcg_gen_shli_tl(t1
, t1
, 24);
16480 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16482 case OPC_CMPGDU_LE_QB
:
16484 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
16485 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
16486 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
16487 tcg_gen_shli_tl(t1
, t1
, 24);
16488 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
16490 case OPC_CMP_EQ_PH
:
16492 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
16494 case OPC_CMP_LT_PH
:
16496 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
16498 case OPC_CMP_LE_PH
:
16500 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
16504 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16508 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16510 case OPC_PACKRL_PH
:
16512 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
16516 #ifdef TARGET_MIPS64
16517 case OPC_CMPU_EQ_OB_DSP
:
16519 case OPC_CMP_EQ_PW
:
16521 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
16523 case OPC_CMP_LT_PW
:
16525 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
16527 case OPC_CMP_LE_PW
:
16529 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
16531 case OPC_CMP_EQ_QH
:
16533 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
16535 case OPC_CMP_LT_QH
:
16537 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
16539 case OPC_CMP_LE_QH
:
16541 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
16543 case OPC_CMPGDU_EQ_OB
:
16545 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16547 case OPC_CMPGDU_LT_OB
:
16549 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16551 case OPC_CMPGDU_LE_OB
:
16553 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16555 case OPC_CMPGU_EQ_OB
:
16557 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16559 case OPC_CMPGU_LT_OB
:
16561 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16563 case OPC_CMPGU_LE_OB
:
16565 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
16567 case OPC_CMPU_EQ_OB
:
16569 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
16571 case OPC_CMPU_LT_OB
:
16573 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
16575 case OPC_CMPU_LE_OB
:
16577 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
16579 case OPC_PACKRL_PW
:
16581 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
16585 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16589 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16593 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
16601 tcg_temp_free(v1_t
);
16602 tcg_temp_free(v2_t
);
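/*
 * gen_mipsdsp_append(): these bit-append operations are expanded inline with
 * tcg_gen_deposit_tl() and shift/or sequences on cpu_gpr[rt] rather than by
 * calling helpers; degenerate shift amounts are special-cased so that no
 * shift is emitted for them.
 */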
16605 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
16606 uint32_t op1
, int rt
, int rs
, int sa
)
16613 /* Treat as NOP. */
16617 t0
= tcg_temp_new();
16618 gen_load_gpr(t0
, rs
);
16621 case OPC_APPEND_DSP
:
16622 switch (MASK_APPEND(ctx
->opcode
)) {
16625 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
16627 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16631 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16632 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16633 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
16634 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16636 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16640 if (sa
!= 0 && sa
!= 2) {
16641 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16642 tcg_gen_ext32u_tl(t0
, t0
);
16643 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
16644 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16646 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
16648 default: /* Invalid */
16649 MIPS_INVAL("MASK APPEND");
16650 generate_exception_end(ctx
, EXCP_RI
);
16654 #ifdef TARGET_MIPS64
16655 case OPC_DAPPEND_DSP
:
16656 switch (MASK_DAPPEND(ctx
->opcode
)) {
16659 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
16663 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
16664 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
16665 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
16669 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
16670 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
16671 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16676 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
16677 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
16678 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
16679 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
16682 default: /* Invalid */
16683 MIPS_INVAL("MASK DAPPEND");
16684 generate_exception_end(ctx
, EXCP_RI
);
16693 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
16694 int ret
, int v1
, int v2
, int check_ret
)
16703 if ((ret
== 0) && (check_ret
== 1)) {
16704 /* Treat as NOP. */
16708 t0
= tcg_temp_new();
16709 t1
= tcg_temp_new();
16710 v1_t
= tcg_temp_new();
16711 v2_t
= tcg_temp_new();
16713 gen_load_gpr(v1_t
, v1
);
16714 gen_load_gpr(v2_t
, v2
);
16717 case OPC_EXTR_W_DSP
:
16721 tcg_gen_movi_tl(t0
, v2
);
16722 tcg_gen_movi_tl(t1
, v1
);
16723 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16726 tcg_gen_movi_tl(t0
, v2
);
16727 tcg_gen_movi_tl(t1
, v1
);
16728 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16730 case OPC_EXTR_RS_W
:
16731 tcg_gen_movi_tl(t0
, v2
);
16732 tcg_gen_movi_tl(t1
, v1
);
16733 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16736 tcg_gen_movi_tl(t0
, v2
);
16737 tcg_gen_movi_tl(t1
, v1
);
16738 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16740 case OPC_EXTRV_S_H
:
16741 tcg_gen_movi_tl(t0
, v2
);
16742 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16745 tcg_gen_movi_tl(t0
, v2
);
16746 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16748 case OPC_EXTRV_R_W
:
16749 tcg_gen_movi_tl(t0
, v2
);
16750 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16752 case OPC_EXTRV_RS_W
:
16753 tcg_gen_movi_tl(t0
, v2
);
16754 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16757 tcg_gen_movi_tl(t0
, v2
);
16758 tcg_gen_movi_tl(t1
, v1
);
16759 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16762 tcg_gen_movi_tl(t0
, v2
);
16763 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16766 tcg_gen_movi_tl(t0
, v2
);
16767 tcg_gen_movi_tl(t1
, v1
);
16768 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16771 tcg_gen_movi_tl(t0
, v2
);
16772 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16775 imm
= (ctx
->opcode
>> 20) & 0x3F;
16776 tcg_gen_movi_tl(t0
, ret
);
16777 tcg_gen_movi_tl(t1
, imm
);
16778 gen_helper_shilo(t0
, t1
, cpu_env
);
16781 tcg_gen_movi_tl(t0
, ret
);
16782 gen_helper_shilo(t0
, v1_t
, cpu_env
);
16785 tcg_gen_movi_tl(t0
, ret
);
16786 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
16789 imm
= (ctx
->opcode
>> 11) & 0x3FF;
16790 tcg_gen_movi_tl(t0
, imm
);
16791 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
16794 imm
= (ctx
->opcode
>> 16) & 0x03FF;
16795 tcg_gen_movi_tl(t0
, imm
);
16796 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
16800 #ifdef TARGET_MIPS64
16801 case OPC_DEXTR_W_DSP
:
16805 tcg_gen_movi_tl(t0
, ret
);
16806 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
16810 int shift
= (ctx
->opcode
>> 19) & 0x7F;
16811 int ac
= (ctx
->opcode
>> 11) & 0x03;
16812 tcg_gen_movi_tl(t0
, shift
);
16813 tcg_gen_movi_tl(t1
, ac
);
16814 gen_helper_dshilo(t0
, t1
, cpu_env
);
16819 int ac
= (ctx
->opcode
>> 11) & 0x03;
16820 tcg_gen_movi_tl(t0
, ac
);
16821 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
16825 tcg_gen_movi_tl(t0
, v2
);
16826 tcg_gen_movi_tl(t1
, v1
);
16828 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16831 tcg_gen_movi_tl(t0
, v2
);
16832 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16835 tcg_gen_movi_tl(t0
, v2
);
16836 tcg_gen_movi_tl(t1
, v1
);
16837 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16840 tcg_gen_movi_tl(t0
, v2
);
16841 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16844 tcg_gen_movi_tl(t0
, v2
);
16845 tcg_gen_movi_tl(t1
, v1
);
16846 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16848 case OPC_DEXTR_R_L
:
16849 tcg_gen_movi_tl(t0
, v2
);
16850 tcg_gen_movi_tl(t1
, v1
);
16851 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16853 case OPC_DEXTR_RS_L
:
16854 tcg_gen_movi_tl(t0
, v2
);
16855 tcg_gen_movi_tl(t1
, v1
);
16856 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16859 tcg_gen_movi_tl(t0
, v2
);
16860 tcg_gen_movi_tl(t1
, v1
);
16861 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16863 case OPC_DEXTR_R_W
:
16864 tcg_gen_movi_tl(t0
, v2
);
16865 tcg_gen_movi_tl(t1
, v1
);
16866 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16868 case OPC_DEXTR_RS_W
:
16869 tcg_gen_movi_tl(t0
, v2
);
16870 tcg_gen_movi_tl(t1
, v1
);
16871 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16873 case OPC_DEXTR_S_H
:
16874 tcg_gen_movi_tl(t0
, v2
);
16875 tcg_gen_movi_tl(t1
, v1
);
16876 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16878 case OPC_DEXTRV_S_H
:
16879 tcg_gen_movi_tl(t0
, v2
);
16880 tcg_gen_movi_tl(t1
, v1
);
16881 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
16884 tcg_gen_movi_tl(t0
, v2
);
16885 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16887 case OPC_DEXTRV_R_L
:
16888 tcg_gen_movi_tl(t0
, v2
);
16889 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16891 case OPC_DEXTRV_RS_L
:
16892 tcg_gen_movi_tl(t0
, v2
);
16893 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16896 tcg_gen_movi_tl(t0
, v2
);
16897 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16899 case OPC_DEXTRV_R_W
:
16900 tcg_gen_movi_tl(t0
, v2
);
16901 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16903 case OPC_DEXTRV_RS_W
:
16904 tcg_gen_movi_tl(t0
, v2
);
16905 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
16914 tcg_temp_free(v1_t
);
16915 tcg_temp_free(v2_t
);
16918 /* End MIPSDSP functions. */
16920 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
16922 int rs
, rt
, rd
, sa
;
16925 rs
= (ctx
->opcode
>> 21) & 0x1f;
16926 rt
= (ctx
->opcode
>> 16) & 0x1f;
16927 rd
= (ctx
->opcode
>> 11) & 0x1f;
16928 sa
= (ctx
->opcode
>> 6) & 0x1f;
16930 op1
= MASK_SPECIAL(ctx
->opcode
);
16933 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
16935 case OPC_MULT
... OPC_DIVU
:
16936 op2
= MASK_R6_MULDIV(ctx
->opcode
);
16946 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
16949 MIPS_INVAL("special_r6 muldiv");
16950 generate_exception_end(ctx
, EXCP_RI
);
16956 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
16960 if (rt
== 0 && sa
== 1) {
16961 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
16962 We need additionally to check other fields */
16963 gen_cl(ctx
, op1
, rd
, rs
);
16965 generate_exception_end(ctx
, EXCP_RI
);
16969 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
16970 gen_helper_do_semihosting(cpu_env
);
16972 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
16973 generate_exception_end(ctx
, EXCP_RI
);
16975 generate_exception_end(ctx
, EXCP_DBp
);
16979 #if defined(TARGET_MIPS64)
16981 check_mips_64(ctx
);
16982 gen_lsa(ctx
, op1
, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 2));
16986 if (rt
== 0 && sa
== 1) {
16987 /* Major opcode and function field is shared with preR6 MFHI/MTHI.
16988 We need additionally to check other fields */
16989 check_mips_64(ctx
);
16990 gen_cl(ctx
, op1
, rd
, rs
);
16992 generate_exception_end(ctx
, EXCP_RI
);
16995 case OPC_DMULT
... OPC_DDIVU
:
16996 op2
= MASK_R6_MULDIV(ctx
->opcode
);
17006 check_mips_64(ctx
);
17007 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
17010 MIPS_INVAL("special_r6 muldiv");
17011 generate_exception_end(ctx
, EXCP_RI
);
17016 default: /* Invalid */
17017 MIPS_INVAL("special_r6");
17018 generate_exception_end(ctx
, EXCP_RI
);
17023 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17025 int rs
, rt
, rd
, sa
;
17028 rs
= (ctx
->opcode
>> 21) & 0x1f;
17029 rt
= (ctx
->opcode
>> 16) & 0x1f;
17030 rd
= (ctx
->opcode
>> 11) & 0x1f;
17031 sa
= (ctx
->opcode
>> 6) & 0x1f;
17033 op1
= MASK_SPECIAL(ctx
->opcode
);
17035 case OPC_MOVN
: /* Conditional move */
17037 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
|
17038 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
17039 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
17041 case OPC_MFHI
: /* Move from HI/LO */
17043 gen_HILO(ctx
, op1
, rs
& 3, rd
);
17046 case OPC_MTLO
: /* Move to HI/LO */
17047 gen_HILO(ctx
, op1
, rd
& 3, rs
);
17050 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS32
);
17051 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
17052 check_cp1_enabled(ctx
);
17053 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
17054 (ctx
->opcode
>> 16) & 1);
17056 generate_exception_err(ctx
, EXCP_CpU
, 1);
17062 check_insn(ctx
, INSN_VR54XX
);
17063 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
17064 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
17066 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17071 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
17073 #if defined(TARGET_MIPS64)
17074 case OPC_DMULT
... OPC_DDIVU
:
17075 check_insn(ctx
, ISA_MIPS3
);
17076 check_mips_64(ctx
);
17077 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
17081 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
17084 #ifdef MIPS_STRICT_STANDARD
17085 MIPS_INVAL("SPIM");
17086 generate_exception_end(ctx
, EXCP_RI
);
17088 /* Implemented as RI exception for now. */
17089 MIPS_INVAL("spim (unofficial)");
17090 generate_exception_end(ctx
, EXCP_RI
);
17093 default: /* Invalid */
17094 MIPS_INVAL("special_legacy");
17095 generate_exception_end(ctx
, EXCP_RI
);
17100 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
17102 int rs
, rt
, rd
, sa
;
17105 rs
= (ctx
->opcode
>> 21) & 0x1f;
17106 rt
= (ctx
->opcode
>> 16) & 0x1f;
17107 rd
= (ctx
->opcode
>> 11) & 0x1f;
17108 sa
= (ctx
->opcode
>> 6) & 0x1f;
17110 op1
= MASK_SPECIAL(ctx
->opcode
);
17112 case OPC_SLL
: /* Shift with immediate */
17113 if (sa
== 5 && rd
== 0 &&
17114 rs
== 0 && rt
== 0) { /* PAUSE */
17115 if ((ctx
->insn_flags
& ISA_MIPS32R6
) &&
17116 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
17117 generate_exception_end(ctx
, EXCP_RI
);
17123 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17126 switch ((ctx
->opcode
>> 21) & 0x1f) {
17128 /* rotr is decoded as srl on non-R2 CPUs */
17129 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17134 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17137 generate_exception_end(ctx
, EXCP_RI
);
17141 case OPC_ADD
... OPC_SUBU
:
17142 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17144 case OPC_SLLV
: /* Shifts */
17146 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17149 switch ((ctx
->opcode
>> 6) & 0x1f) {
17151 /* rotrv is decoded as srlv on non-R2 CPUs */
17152 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17157 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17160 generate_exception_end(ctx
, EXCP_RI
);
17164 case OPC_SLT
: /* Set on less than */
17166 gen_slt(ctx
, op1
, rd
, rs
, rt
);
17168 case OPC_AND
: /* Logic*/
17172 gen_logic(ctx
, op1
, rd
, rs
, rt
);
17175 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
17177 case OPC_TGE
... OPC_TEQ
: /* Traps */
17179 check_insn(ctx
, ISA_MIPS2
);
17180 gen_trap(ctx
, op1
, rs
, rt
, -1);
17182 case OPC_LSA
: /* OPC_PMON */
17183 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17184 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17185 decode_opc_special_r6(env
, ctx
);
17187 /* Pmon entry point, also R4010 selsl */
17188 #ifdef MIPS_STRICT_STANDARD
17189 MIPS_INVAL("PMON / selsl");
17190 generate_exception_end(ctx
, EXCP_RI
);
17192 gen_helper_0e0i(pmon
, sa
);
17197 generate_exception_end(ctx
, EXCP_SYSCALL
);
17200 generate_exception_end(ctx
, EXCP_BREAK
);
17203 check_insn(ctx
, ISA_MIPS2
);
17204 /* Treat as NOP. */
17207 #if defined(TARGET_MIPS64)
17208 /* MIPS64 specific opcodes */
17213 check_insn(ctx
, ISA_MIPS3
);
17214 check_mips_64(ctx
);
17215 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17218 switch ((ctx
->opcode
>> 21) & 0x1f) {
17220 /* drotr is decoded as dsrl on non-R2 CPUs */
17221 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17226 check_insn(ctx
, ISA_MIPS3
);
17227 check_mips_64(ctx
);
17228 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17231 generate_exception_end(ctx
, EXCP_RI
);
17236 switch ((ctx
->opcode
>> 21) & 0x1f) {
17238 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
17239 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17244 check_insn(ctx
, ISA_MIPS3
);
17245 check_mips_64(ctx
);
17246 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
17249 generate_exception_end(ctx
, EXCP_RI
);
17253 case OPC_DADD
... OPC_DSUBU
:
17254 check_insn(ctx
, ISA_MIPS3
);
17255 check_mips_64(ctx
);
17256 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17260 check_insn(ctx
, ISA_MIPS3
);
17261 check_mips_64(ctx
);
17262 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17265 switch ((ctx
->opcode
>> 6) & 0x1f) {
17267 /* drotrv is decoded as dsrlv on non-R2 CPUs */
17268 if (ctx
->insn_flags
& ISA_MIPS32R2
) {
17273 check_insn(ctx
, ISA_MIPS3
);
17274 check_mips_64(ctx
);
17275 gen_shift(ctx
, op1
, rd
, rs
, rt
);
17278 generate_exception_end(ctx
, EXCP_RI
);
17283 if ((ctx
->insn_flags
& ISA_MIPS32R6
) ||
17284 (env
->CP0_Config3
& (1 << CP0C3_MSAP
))) {
17285 decode_opc_special_r6(env
, ctx
);
17290 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
17291 decode_opc_special_r6(env
, ctx
);
17293 decode_opc_special_legacy(env
, ctx
);
17298 static void decode_opc_special2_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17303 check_insn_opc_removed(ctx
, ISA_MIPS32R6
);
17305 rs
= (ctx
->opcode
>> 21) & 0x1f;
17306 rt
= (ctx
->opcode
>> 16) & 0x1f;
17307 rd
= (ctx
->opcode
>> 11) & 0x1f;
17309 op1
= MASK_SPECIAL2(ctx
->opcode
);
17311 case OPC_MADD
... OPC_MADDU
: /* Multiply and add/sub */
17312 case OPC_MSUB
... OPC_MSUBU
:
17313 check_insn(ctx
, ISA_MIPS32
);
17314 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
17317 gen_arith(ctx
, op1
, rd
, rs
, rt
);
17320 case OPC_DIVU_G_2F
:
17321 case OPC_MULT_G_2F
:
17322 case OPC_MULTU_G_2F
:
17324 case OPC_MODU_G_2F
:
17325 check_insn(ctx
, INSN_LOONGSON2F
);
17326 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17330 check_insn(ctx
, ISA_MIPS32
);
17331 gen_cl(ctx
, op1
, rd
, rs
);
17334 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
17335 gen_helper_do_semihosting(cpu_env
);
17337 /* XXX: not clear which exception should be raised
17338 * when in debug mode...
17340 check_insn(ctx
, ISA_MIPS32
);
17341 generate_exception_end(ctx
, EXCP_DBp
);
17344 #if defined(TARGET_MIPS64)
17347 check_insn(ctx
, ISA_MIPS64
);
17348 check_mips_64(ctx
);
17349 gen_cl(ctx
, op1
, rd
, rs
);
17351 case OPC_DMULT_G_2F
:
17352 case OPC_DMULTU_G_2F
:
17353 case OPC_DDIV_G_2F
:
17354 case OPC_DDIVU_G_2F
:
17355 case OPC_DMOD_G_2F
:
17356 case OPC_DMODU_G_2F
:
17357 check_insn(ctx
, INSN_LOONGSON2F
);
17358 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17361 default: /* Invalid */
17362 MIPS_INVAL("special2_legacy");
17363 generate_exception_end(ctx
, EXCP_RI
);
17368 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
17370 int rs
, rt
, rd
, sa
;
17374 rs
= (ctx
->opcode
>> 21) & 0x1f;
17375 rt
= (ctx
->opcode
>> 16) & 0x1f;
17376 rd
= (ctx
->opcode
>> 11) & 0x1f;
17377 sa
= (ctx
->opcode
>> 6) & 0x1f;
17378 imm
= (int16_t)ctx
->opcode
>> 7;
17380 op1
= MASK_SPECIAL3(ctx
->opcode
);
17384 /* hint codes 24-31 are reserved and signal RI */
17385 generate_exception_end(ctx
, EXCP_RI
);
17387 /* Treat as NOP. */
17390 check_cp0_enabled(ctx
);
17391 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
17392 gen_cache_operation(ctx
, rt
, rs
, imm
);
17396 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17399 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17404 /* Treat as NOP. */
17407 op2
= MASK_BSHFL(ctx
->opcode
);
17409 case OPC_ALIGN
... OPC_ALIGN_END
:
17410 gen_align(ctx
, OPC_ALIGN
, rd
, rs
, rt
, sa
& 3);
17413 gen_bitswap(ctx
, op2
, rd
, rt
);
17418 #if defined(TARGET_MIPS64)
17420 gen_st_cond(ctx
, op1
, rt
, rs
, imm
);
17423 gen_ld(ctx
, op1
, rt
, rs
, imm
);
17426 check_mips_64(ctx
);
17429 /* Treat as NOP. */
17432 op2
= MASK_DBSHFL(ctx
->opcode
);
17434 case OPC_DALIGN
... OPC_DALIGN_END
:
17435 gen_align(ctx
, OPC_DALIGN
, rd
, rs
, rt
, sa
& 7);
17438 gen_bitswap(ctx
, op2
, rd
, rt
);
17445 default: /* Invalid */
17446 MIPS_INVAL("special3_r6");
17447 generate_exception_end(ctx
, EXCP_RI
);
17452 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
17457 rs
= (ctx
->opcode
>> 21) & 0x1f;
17458 rt
= (ctx
->opcode
>> 16) & 0x1f;
17459 rd
= (ctx
->opcode
>> 11) & 0x1f;
17461 op1
= MASK_SPECIAL3(ctx
->opcode
);
17463 case OPC_DIV_G_2E
... OPC_DIVU_G_2E
:
17464 case OPC_MOD_G_2E
... OPC_MODU_G_2E
:
17465 case OPC_MULT_G_2E
... OPC_MULTU_G_2E
:
17466 /* OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
17467 * the same mask and op1. */
17468 if ((ctx
->insn_flags
& ASE_DSPR2
) && (op1
== OPC_MULT_G_2E
)) {
17469 op2
= MASK_ADDUH_QB(ctx
->opcode
);
17472 case OPC_ADDUH_R_QB
:
17474 case OPC_ADDQH_R_PH
:
17476 case OPC_ADDQH_R_W
:
17478 case OPC_SUBUH_R_QB
:
17480 case OPC_SUBQH_R_PH
:
17482 case OPC_SUBQH_R_W
:
17483 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17488 case OPC_MULQ_RS_W
:
17489 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17492 MIPS_INVAL("MASK ADDUH.QB");
17493 generate_exception_end(ctx
, EXCP_RI
);
17496 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
17497 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17499 generate_exception_end(ctx
, EXCP_RI
);
17503 op2
= MASK_LX(ctx
->opcode
);
17505 #if defined(TARGET_MIPS64)
17511 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
17513 default: /* Invalid */
17514 MIPS_INVAL("MASK LX");
17515 generate_exception_end(ctx
, EXCP_RI
);
17519 case OPC_ABSQ_S_PH_DSP
:
17520 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
17522 case OPC_ABSQ_S_QB
:
17523 case OPC_ABSQ_S_PH
:
17525 case OPC_PRECEQ_W_PHL
:
17526 case OPC_PRECEQ_W_PHR
:
17527 case OPC_PRECEQU_PH_QBL
:
17528 case OPC_PRECEQU_PH_QBR
:
17529 case OPC_PRECEQU_PH_QBLA
:
17530 case OPC_PRECEQU_PH_QBRA
:
17531 case OPC_PRECEU_PH_QBL
:
17532 case OPC_PRECEU_PH_QBR
:
17533 case OPC_PRECEU_PH_QBLA
:
17534 case OPC_PRECEU_PH_QBRA
:
17535 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17542 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17545 MIPS_INVAL("MASK ABSQ_S.PH");
17546 generate_exception_end(ctx
, EXCP_RI
);
17550 case OPC_ADDU_QB_DSP
:
17551 op2
= MASK_ADDU_QB(ctx
->opcode
);
17554 case OPC_ADDQ_S_PH
:
17557 case OPC_ADDU_S_QB
:
17559 case OPC_ADDU_S_PH
:
17561 case OPC_SUBQ_S_PH
:
17564 case OPC_SUBU_S_QB
:
17566 case OPC_SUBU_S_PH
:
17570 case OPC_RADDU_W_QB
:
17571 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17573 case OPC_MULEU_S_PH_QBL
:
17574 case OPC_MULEU_S_PH_QBR
:
17575 case OPC_MULQ_RS_PH
:
17576 case OPC_MULEQ_S_W_PHL
:
17577 case OPC_MULEQ_S_W_PHR
:
17578 case OPC_MULQ_S_PH
:
17579 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17581 default: /* Invalid */
17582 MIPS_INVAL("MASK ADDU.QB");
17583 generate_exception_end(ctx
, EXCP_RI
);
17588 case OPC_CMPU_EQ_QB_DSP
:
17589 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
17591 case OPC_PRECR_SRA_PH_W
:
17592 case OPC_PRECR_SRA_R_PH_W
:
17593 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17595 case OPC_PRECR_QB_PH
:
17596 case OPC_PRECRQ_QB_PH
:
17597 case OPC_PRECRQ_PH_W
:
17598 case OPC_PRECRQ_RS_PH_W
:
17599 case OPC_PRECRQU_S_QB_PH
:
17600 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17602 case OPC_CMPU_EQ_QB
:
17603 case OPC_CMPU_LT_QB
:
17604 case OPC_CMPU_LE_QB
:
17605 case OPC_CMP_EQ_PH
:
17606 case OPC_CMP_LT_PH
:
17607 case OPC_CMP_LE_PH
:
17608 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17610 case OPC_CMPGU_EQ_QB
:
17611 case OPC_CMPGU_LT_QB
:
17612 case OPC_CMPGU_LE_QB
:
17613 case OPC_CMPGDU_EQ_QB
:
17614 case OPC_CMPGDU_LT_QB
:
17615 case OPC_CMPGDU_LE_QB
:
17618 case OPC_PACKRL_PH
:
17619 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17621 default: /* Invalid */
17622 MIPS_INVAL("MASK CMPU.EQ.QB");
17623 generate_exception_end(ctx
, EXCP_RI
);
17627 case OPC_SHLL_QB_DSP
:
17628 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17630 case OPC_DPA_W_PH_DSP
:
17631 op2
= MASK_DPA_W_PH(ctx
->opcode
);
17633 case OPC_DPAU_H_QBL
:
17634 case OPC_DPAU_H_QBR
:
17635 case OPC_DPSU_H_QBL
:
17636 case OPC_DPSU_H_QBR
:
17638 case OPC_DPAX_W_PH
:
17639 case OPC_DPAQ_S_W_PH
:
17640 case OPC_DPAQX_S_W_PH
:
17641 case OPC_DPAQX_SA_W_PH
:
17643 case OPC_DPSX_W_PH
:
17644 case OPC_DPSQ_S_W_PH
:
17645 case OPC_DPSQX_S_W_PH
:
17646 case OPC_DPSQX_SA_W_PH
:
17647 case OPC_MULSAQ_S_W_PH
:
17648 case OPC_DPAQ_SA_L_W
:
17649 case OPC_DPSQ_SA_L_W
:
17650 case OPC_MAQ_S_W_PHL
:
17651 case OPC_MAQ_S_W_PHR
:
17652 case OPC_MAQ_SA_W_PHL
:
17653 case OPC_MAQ_SA_W_PHR
:
17654 case OPC_MULSA_W_PH
:
17655 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17657 default: /* Invalid */
17658 MIPS_INVAL("MASK DPAW.PH");
17659 generate_exception_end(ctx
, EXCP_RI
);
17664 op2
= MASK_INSV(ctx
->opcode
);
17675 t0
= tcg_temp_new();
17676 t1
= tcg_temp_new();
17678 gen_load_gpr(t0
, rt
);
17679 gen_load_gpr(t1
, rs
);
17681 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17687 default: /* Invalid */
17688 MIPS_INVAL("MASK INSV");
17689 generate_exception_end(ctx
, EXCP_RI
);
17693 case OPC_APPEND_DSP
:
17694 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17696 case OPC_EXTR_W_DSP
:
17697 op2
= MASK_EXTR_W(ctx
->opcode
);
17701 case OPC_EXTR_RS_W
:
17703 case OPC_EXTRV_S_H
:
17705 case OPC_EXTRV_R_W
:
17706 case OPC_EXTRV_RS_W
:
17711 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17714 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17720 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17722 default: /* Invalid */
17723 MIPS_INVAL("MASK EXTR.W");
17724 generate_exception_end(ctx
, EXCP_RI
);
17728 #if defined(TARGET_MIPS64)
17729 case OPC_DDIV_G_2E
... OPC_DDIVU_G_2E
:
17730 case OPC_DMULT_G_2E
... OPC_DMULTU_G_2E
:
17731 case OPC_DMOD_G_2E
... OPC_DMODU_G_2E
:
17732 check_insn(ctx
, INSN_LOONGSON2E
);
17733 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
17735 case OPC_ABSQ_S_QH_DSP
:
17736 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
17738 case OPC_PRECEQ_L_PWL
:
17739 case OPC_PRECEQ_L_PWR
:
17740 case OPC_PRECEQ_PW_QHL
:
17741 case OPC_PRECEQ_PW_QHR
:
17742 case OPC_PRECEQ_PW_QHLA
:
17743 case OPC_PRECEQ_PW_QHRA
:
17744 case OPC_PRECEQU_QH_OBL
:
17745 case OPC_PRECEQU_QH_OBR
:
17746 case OPC_PRECEQU_QH_OBLA
:
17747 case OPC_PRECEQU_QH_OBRA
:
17748 case OPC_PRECEU_QH_OBL
:
17749 case OPC_PRECEU_QH_OBR
:
17750 case OPC_PRECEU_QH_OBLA
:
17751 case OPC_PRECEU_QH_OBRA
:
17752 case OPC_ABSQ_S_OB
:
17753 case OPC_ABSQ_S_PW
:
17754 case OPC_ABSQ_S_QH
:
17755 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17763 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
17765 default: /* Invalid */
17766 MIPS_INVAL("MASK ABSQ_S.QH");
17767 generate_exception_end(ctx
, EXCP_RI
);
17771 case OPC_ADDU_OB_DSP
:
17772 op2
= MASK_ADDU_OB(ctx
->opcode
);
17774 case OPC_RADDU_L_OB
:
17776 case OPC_SUBQ_S_PW
:
17778 case OPC_SUBQ_S_QH
:
17780 case OPC_SUBU_S_OB
:
17782 case OPC_SUBU_S_QH
:
17784 case OPC_SUBUH_R_OB
:
17786 case OPC_ADDQ_S_PW
:
17788 case OPC_ADDQ_S_QH
:
17790 case OPC_ADDU_S_OB
:
17792 case OPC_ADDU_S_QH
:
17794 case OPC_ADDUH_R_OB
:
17795 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17797 case OPC_MULEQ_S_PW_QHL
:
17798 case OPC_MULEQ_S_PW_QHR
:
17799 case OPC_MULEU_S_QH_OBL
:
17800 case OPC_MULEU_S_QH_OBR
:
17801 case OPC_MULQ_RS_QH
:
17802 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17804 default: /* Invalid */
17805 MIPS_INVAL("MASK ADDU.OB");
17806 generate_exception_end(ctx
, EXCP_RI
);
17810 case OPC_CMPU_EQ_OB_DSP
:
17811 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
17813 case OPC_PRECR_SRA_QH_PW
:
17814 case OPC_PRECR_SRA_R_QH_PW
:
17815 /* Return value is rt. */
17816 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
17818 case OPC_PRECR_OB_QH
:
17819 case OPC_PRECRQ_OB_QH
:
17820 case OPC_PRECRQ_PW_L
:
17821 case OPC_PRECRQ_QH_PW
:
17822 case OPC_PRECRQ_RS_QH_PW
:
17823 case OPC_PRECRQU_S_OB_QH
:
17824 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
17826 case OPC_CMPU_EQ_OB
:
17827 case OPC_CMPU_LT_OB
:
17828 case OPC_CMPU_LE_OB
:
17829 case OPC_CMP_EQ_QH
:
17830 case OPC_CMP_LT_QH
:
17831 case OPC_CMP_LE_QH
:
17832 case OPC_CMP_EQ_PW
:
17833 case OPC_CMP_LT_PW
:
17834 case OPC_CMP_LE_PW
:
17835 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17837 case OPC_CMPGDU_EQ_OB
:
17838 case OPC_CMPGDU_LT_OB
:
17839 case OPC_CMPGDU_LE_OB
:
17840 case OPC_CMPGU_EQ_OB
:
17841 case OPC_CMPGU_LT_OB
:
17842 case OPC_CMPGU_LE_OB
:
17843 case OPC_PACKRL_PW
:
17847 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
17849 default: /* Invalid */
17850 MIPS_INVAL("MASK CMPU_EQ.OB");
17851 generate_exception_end(ctx
, EXCP_RI
);
17855 case OPC_DAPPEND_DSP
:
17856 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
17858 case OPC_DEXTR_W_DSP
:
17859 op2
= MASK_DEXTR_W(ctx
->opcode
);
17866 case OPC_DEXTR_R_L
:
17867 case OPC_DEXTR_RS_L
:
17869 case OPC_DEXTR_R_W
:
17870 case OPC_DEXTR_RS_W
:
17871 case OPC_DEXTR_S_H
:
17873 case OPC_DEXTRV_R_L
:
17874 case OPC_DEXTRV_RS_L
:
17875 case OPC_DEXTRV_S_H
:
17877 case OPC_DEXTRV_R_W
:
17878 case OPC_DEXTRV_RS_W
:
17879 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
17884 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17886 default: /* Invalid */
17887 MIPS_INVAL("MASK EXTR.W");
17888 generate_exception_end(ctx
, EXCP_RI
);
17892 case OPC_DPAQ_W_QH_DSP
:
17893 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
17895 case OPC_DPAU_H_OBL
:
17896 case OPC_DPAU_H_OBR
:
17897 case OPC_DPSU_H_OBL
:
17898 case OPC_DPSU_H_OBR
:
17900 case OPC_DPAQ_S_W_QH
:
17902 case OPC_DPSQ_S_W_QH
:
17903 case OPC_MULSAQ_S_W_QH
:
17904 case OPC_DPAQ_SA_L_PW
:
17905 case OPC_DPSQ_SA_L_PW
:
17906 case OPC_MULSAQ_S_L_PW
:
17907 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17909 case OPC_MAQ_S_W_QHLL
:
17910 case OPC_MAQ_S_W_QHLR
:
17911 case OPC_MAQ_S_W_QHRL
:
17912 case OPC_MAQ_S_W_QHRR
:
17913 case OPC_MAQ_SA_W_QHLL
:
17914 case OPC_MAQ_SA_W_QHLR
:
17915 case OPC_MAQ_SA_W_QHRL
:
17916 case OPC_MAQ_SA_W_QHRR
:
17917 case OPC_MAQ_S_L_PWL
:
17918 case OPC_MAQ_S_L_PWR
:
17923 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
17925 default: /* Invalid */
17926 MIPS_INVAL("MASK DPAQ.W.QH");
17927 generate_exception_end(ctx
, EXCP_RI
);
17931 case OPC_DINSV_DSP
:
17932 op2
= MASK_INSV(ctx
->opcode
);
17943 t0
= tcg_temp_new();
17944 t1
= tcg_temp_new();
17946 gen_load_gpr(t0
, rt
);
17947 gen_load_gpr(t1
, rs
);
17949 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
17955 default: /* Invalid */
17956 MIPS_INVAL("MASK DINSV");
17957 generate_exception_end(ctx
, EXCP_RI
);
17961 case OPC_SHLL_OB_DSP
:
17962 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
17965 default: /* Invalid */
17966 MIPS_INVAL("special3_legacy");
17967 generate_exception_end(ctx
, EXCP_RI
);
17972 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
17974 int rs
, rt
, rd
, sa
;
17977 rs
= (ctx
->opcode
>> 21) & 0x1f;
17978 rt
= (ctx
->opcode
>> 16) & 0x1f;
17979 rd
= (ctx
->opcode
>> 11) & 0x1f;
17980 sa
= (ctx
->opcode
>> 6) & 0x1f;
17982 op1
= MASK_SPECIAL3(ctx
->opcode
);
17986 check_insn(ctx
, ISA_MIPS32R2
);
17987 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
17990 op2
= MASK_BSHFL(ctx
->opcode
);
17992 case OPC_ALIGN
... OPC_ALIGN_END
:
17994 check_insn(ctx
, ISA_MIPS32R6
);
17995 decode_opc_special3_r6(env
, ctx
);
17998 check_insn(ctx
, ISA_MIPS32R2
);
17999 gen_bshfl(ctx
, op2
, rt
, rd
);
18003 #if defined(TARGET_MIPS64)
18004 case OPC_DEXTM
... OPC_DEXT
:
18005 case OPC_DINSM
... OPC_DINS
:
18006 check_insn(ctx
, ISA_MIPS64R2
);
18007 check_mips_64(ctx
);
18008 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
18011 op2
= MASK_DBSHFL(ctx
->opcode
);
18013 case OPC_DALIGN
... OPC_DALIGN_END
:
18015 check_insn(ctx
, ISA_MIPS32R6
);
18016 decode_opc_special3_r6(env
, ctx
);
18019 check_insn(ctx
, ISA_MIPS64R2
);
18020 check_mips_64(ctx
);
18021 op2
= MASK_DBSHFL(ctx
->opcode
);
18022 gen_bshfl(ctx
, op2
, rt
, rd
);
18028 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
18031 check_insn(ctx
, ASE_MT
);
18033 TCGv t0
= tcg_temp_new();
18034 TCGv t1
= tcg_temp_new();
18036 gen_load_gpr(t0
, rt
);
18037 gen_load_gpr(t1
, rs
);
18038 gen_helper_fork(t0
, t1
);
18044 check_insn(ctx
, ASE_MT
);
18046 TCGv t0
= tcg_temp_new();
18048 gen_load_gpr(t0
, rs
);
18049 gen_helper_yield(t0
, cpu_env
, t0
);
18050 gen_store_gpr(t0
, rd
);
18055 if (ctx
->insn_flags
& ISA_MIPS32R6
) {
18056 decode_opc_special3_r6(env
, ctx
);
18058 decode_opc_special3_legacy(env
, ctx
);
18063 /* MIPS SIMD Architecture (MSA) */
18064 static inline int check_msa_access(DisasContext
*ctx
)
18066 if (unlikely((ctx
->hflags
& MIPS_HFLAG_FPU
) &&
18067 !(ctx
->hflags
& MIPS_HFLAG_F64
))) {
18068 generate_exception_end(ctx
, EXCP_RI
);
18072 if (unlikely(!(ctx
->hflags
& MIPS_HFLAG_MSA
))) {
18073 if (ctx
->insn_flags
& ASE_MSA
) {
18074 generate_exception_end(ctx
, EXCP_MSADIS
);
18077 generate_exception_end(ctx
, EXCP_RI
);
18084 static void gen_check_zero_element(TCGv tresult
, uint8_t df
, uint8_t wt
)
18086 /* generates tcg ops to check if any element is 0 */
18087 /* Note this function only works with MSA_WRLEN = 128 */
18088 uint64_t eval_zero_or_big
= 0;
18089 uint64_t eval_big
= 0;
18090 TCGv_i64 t0
= tcg_temp_new_i64();
18091 TCGv_i64 t1
= tcg_temp_new_i64();
18094 eval_zero_or_big
= 0x0101010101010101ULL
;
18095 eval_big
= 0x8080808080808080ULL
;
18098 eval_zero_or_big
= 0x0001000100010001ULL
;
18099 eval_big
= 0x8000800080008000ULL
;
18102 eval_zero_or_big
= 0x0000000100000001ULL
;
18103 eval_big
= 0x8000000080000000ULL
;
18106 eval_zero_or_big
= 0x0000000000000001ULL
;
18107 eval_big
= 0x8000000000000000ULL
;
18110 tcg_gen_subi_i64(t0
, msa_wr_d
[wt
<<1], eval_zero_or_big
);
18111 tcg_gen_andc_i64(t0
, t0
, msa_wr_d
[wt
<<1]);
18112 tcg_gen_andi_i64(t0
, t0
, eval_big
);
18113 tcg_gen_subi_i64(t1
, msa_wr_d
[(wt
<<1)+1], eval_zero_or_big
);
18114 tcg_gen_andc_i64(t1
, t1
, msa_wr_d
[(wt
<<1)+1]);
18115 tcg_gen_andi_i64(t1
, t1
, eval_big
);
18116 tcg_gen_or_i64(t0
, t0
, t1
);
18117 /* if all bits are zero then all elements are not zero */
18118 /* if some bit is non-zero then some element is zero */
18119 tcg_gen_setcondi_i64(TCG_COND_NE
, t0
, t0
, 0);
18120 tcg_gen_trunc_i64_tl(tresult
, t0
);
18121 tcg_temp_free_i64(t0
);
18122 tcg_temp_free_i64(t1
);
18125 static void gen_msa_branch(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t op1
)
18127 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18128 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18129 int64_t s16
= (int16_t)ctx
->opcode
;
18131 check_msa_access(ctx
);
18133 if (ctx
->insn_flags
& ISA_MIPS32R6
&& ctx
->hflags
& MIPS_HFLAG_BMASK
) {
18134 generate_exception_end(ctx
, EXCP_RI
);
18141 TCGv_i64 t0
= tcg_temp_new_i64();
18142 tcg_gen_or_i64(t0
, msa_wr_d
[wt
<<1], msa_wr_d
[(wt
<<1)+1]);
18143 tcg_gen_setcondi_i64((op1
== OPC_BZ_V
) ?
18144 TCG_COND_EQ
: TCG_COND_NE
, t0
, t0
, 0);
18145 tcg_gen_trunc_i64_tl(bcond
, t0
);
18146 tcg_temp_free_i64(t0
);
18153 gen_check_zero_element(bcond
, df
, wt
);
18159 gen_check_zero_element(bcond
, df
, wt
);
18160 tcg_gen_setcondi_tl(TCG_COND_EQ
, bcond
, bcond
, 0);
18164 ctx
->btarget
= ctx
->pc
+ (s16
<< 2) + 4;
18166 ctx
->hflags
|= MIPS_HFLAG_BC
;
18167 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
18170 static void gen_msa_i8(CPUMIPSState
*env
, DisasContext
*ctx
)
18172 #define MASK_MSA_I8(op) (MASK_MSA_MINOR(op) | (op & (0x03 << 24)))
18173 uint8_t i8
= (ctx
->opcode
>> 16) & 0xff;
18174 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18175 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18177 TCGv_i32 twd
= tcg_const_i32(wd
);
18178 TCGv_i32 tws
= tcg_const_i32(ws
);
18179 TCGv_i32 ti8
= tcg_const_i32(i8
);
18181 switch (MASK_MSA_I8(ctx
->opcode
)) {
18183 gen_helper_msa_andi_b(cpu_env
, twd
, tws
, ti8
);
18186 gen_helper_msa_ori_b(cpu_env
, twd
, tws
, ti8
);
18189 gen_helper_msa_nori_b(cpu_env
, twd
, tws
, ti8
);
18192 gen_helper_msa_xori_b(cpu_env
, twd
, tws
, ti8
);
18195 gen_helper_msa_bmnzi_b(cpu_env
, twd
, tws
, ti8
);
18198 gen_helper_msa_bmzi_b(cpu_env
, twd
, tws
, ti8
);
18201 gen_helper_msa_bseli_b(cpu_env
, twd
, tws
, ti8
);
18207 uint8_t df
= (ctx
->opcode
>> 24) & 0x3;
18208 if (df
== DF_DOUBLE
) {
18209 generate_exception_end(ctx
, EXCP_RI
);
18211 TCGv_i32 tdf
= tcg_const_i32(df
);
18212 gen_helper_msa_shf_df(cpu_env
, tdf
, twd
, tws
, ti8
);
18213 tcg_temp_free_i32(tdf
);
18218 MIPS_INVAL("MSA instruction");
18219 generate_exception_end(ctx
, EXCP_RI
);
18223 tcg_temp_free_i32(twd
);
18224 tcg_temp_free_i32(tws
);
18225 tcg_temp_free_i32(ti8
);
18228 static void gen_msa_i5(CPUMIPSState
*env
, DisasContext
*ctx
)
18230 #define MASK_MSA_I5(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18231 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18232 int8_t s5
= (int8_t) sextract32(ctx
->opcode
, 16, 5);
18233 uint8_t u5
= (ctx
->opcode
>> 16) & 0x1f;
18234 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18235 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18237 TCGv_i32 tdf
= tcg_const_i32(df
);
18238 TCGv_i32 twd
= tcg_const_i32(wd
);
18239 TCGv_i32 tws
= tcg_const_i32(ws
);
18240 TCGv_i32 timm
= tcg_temp_new_i32();
18241 tcg_gen_movi_i32(timm
, u5
);
18243 switch (MASK_MSA_I5(ctx
->opcode
)) {
18245 gen_helper_msa_addvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18248 gen_helper_msa_subvi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18250 case OPC_MAXI_S_df
:
18251 tcg_gen_movi_i32(timm
, s5
);
18252 gen_helper_msa_maxi_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18254 case OPC_MAXI_U_df
:
18255 gen_helper_msa_maxi_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18257 case OPC_MINI_S_df
:
18258 tcg_gen_movi_i32(timm
, s5
);
18259 gen_helper_msa_mini_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18261 case OPC_MINI_U_df
:
18262 gen_helper_msa_mini_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18265 tcg_gen_movi_i32(timm
, s5
);
18266 gen_helper_msa_ceqi_df(cpu_env
, tdf
, twd
, tws
, timm
);
18268 case OPC_CLTI_S_df
:
18269 tcg_gen_movi_i32(timm
, s5
);
18270 gen_helper_msa_clti_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18272 case OPC_CLTI_U_df
:
18273 gen_helper_msa_clti_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18275 case OPC_CLEI_S_df
:
18276 tcg_gen_movi_i32(timm
, s5
);
18277 gen_helper_msa_clei_s_df(cpu_env
, tdf
, twd
, tws
, timm
);
18279 case OPC_CLEI_U_df
:
18280 gen_helper_msa_clei_u_df(cpu_env
, tdf
, twd
, tws
, timm
);
18284 int32_t s10
= sextract32(ctx
->opcode
, 11, 10);
18285 tcg_gen_movi_i32(timm
, s10
);
18286 gen_helper_msa_ldi_df(cpu_env
, tdf
, twd
, timm
);
18290 MIPS_INVAL("MSA instruction");
18291 generate_exception_end(ctx
, EXCP_RI
);
18295 tcg_temp_free_i32(tdf
);
18296 tcg_temp_free_i32(twd
);
18297 tcg_temp_free_i32(tws
);
18298 tcg_temp_free_i32(timm
);
18301 static void gen_msa_bit(CPUMIPSState
*env
, DisasContext
*ctx
)
18303 #define MASK_MSA_BIT(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18304 uint8_t dfm
= (ctx
->opcode
>> 16) & 0x7f;
18305 uint32_t df
= 0, m
= 0;
18306 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18307 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18314 if ((dfm
& 0x40) == 0x00) {
18317 } else if ((dfm
& 0x60) == 0x40) {
18320 } else if ((dfm
& 0x70) == 0x60) {
18323 } else if ((dfm
& 0x78) == 0x70) {
18327 generate_exception_end(ctx
, EXCP_RI
);
18331 tdf
= tcg_const_i32(df
);
18332 tm
= tcg_const_i32(m
);
18333 twd
= tcg_const_i32(wd
);
18334 tws
= tcg_const_i32(ws
);
18336 switch (MASK_MSA_BIT(ctx
->opcode
)) {
18338 gen_helper_msa_slli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18341 gen_helper_msa_srai_df(cpu_env
, tdf
, twd
, tws
, tm
);
18344 gen_helper_msa_srli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18347 gen_helper_msa_bclri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18350 gen_helper_msa_bseti_df(cpu_env
, tdf
, twd
, tws
, tm
);
18353 gen_helper_msa_bnegi_df(cpu_env
, tdf
, twd
, tws
, tm
);
18355 case OPC_BINSLI_df
:
18356 gen_helper_msa_binsli_df(cpu_env
, tdf
, twd
, tws
, tm
);
18358 case OPC_BINSRI_df
:
18359 gen_helper_msa_binsri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18362 gen_helper_msa_sat_s_df(cpu_env
, tdf
, twd
, tws
, tm
);
18365 gen_helper_msa_sat_u_df(cpu_env
, tdf
, twd
, tws
, tm
);
18368 gen_helper_msa_srari_df(cpu_env
, tdf
, twd
, tws
, tm
);
18371 gen_helper_msa_srlri_df(cpu_env
, tdf
, twd
, tws
, tm
);
18374 MIPS_INVAL("MSA instruction");
18375 generate_exception_end(ctx
, EXCP_RI
);
18379 tcg_temp_free_i32(tdf
);
18380 tcg_temp_free_i32(tm
);
18381 tcg_temp_free_i32(twd
);
18382 tcg_temp_free_i32(tws
);
18385 static void gen_msa_3r(CPUMIPSState
*env
, DisasContext
*ctx
)
18387 #define MASK_MSA_3R(op) (MASK_MSA_MINOR(op) | (op & (0x7 << 23)))
18388 uint8_t df
= (ctx
->opcode
>> 21) & 0x3;
18389 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18390 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18391 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18393 TCGv_i32 tdf
= tcg_const_i32(df
);
18394 TCGv_i32 twd
= tcg_const_i32(wd
);
18395 TCGv_i32 tws
= tcg_const_i32(ws
);
18396 TCGv_i32 twt
= tcg_const_i32(wt
);
18398 switch (MASK_MSA_3R(ctx
->opcode
)) {
18400 gen_helper_msa_sll_df(cpu_env
, tdf
, twd
, tws
, twt
);
18403 gen_helper_msa_addv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18406 gen_helper_msa_ceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18409 gen_helper_msa_add_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18411 case OPC_SUBS_S_df
:
18412 gen_helper_msa_subs_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18415 gen_helper_msa_mulv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18418 gen_helper_msa_sld_df(cpu_env
, tdf
, twd
, tws
, twt
);
18421 gen_helper_msa_vshf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18424 gen_helper_msa_sra_df(cpu_env
, tdf
, twd
, tws
, twt
);
18427 gen_helper_msa_subv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18429 case OPC_ADDS_A_df
:
18430 gen_helper_msa_adds_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18432 case OPC_SUBS_U_df
:
18433 gen_helper_msa_subs_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18436 gen_helper_msa_maddv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18439 gen_helper_msa_splat_df(cpu_env
, tdf
, twd
, tws
, twt
);
18442 gen_helper_msa_srar_df(cpu_env
, tdf
, twd
, tws
, twt
);
18445 gen_helper_msa_srl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18448 gen_helper_msa_max_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18451 gen_helper_msa_clt_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18453 case OPC_ADDS_S_df
:
18454 gen_helper_msa_adds_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18456 case OPC_SUBSUS_U_df
:
18457 gen_helper_msa_subsus_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18460 gen_helper_msa_msubv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18463 gen_helper_msa_pckev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18466 gen_helper_msa_srlr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18469 gen_helper_msa_bclr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18472 gen_helper_msa_max_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18475 gen_helper_msa_clt_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18477 case OPC_ADDS_U_df
:
18478 gen_helper_msa_adds_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18480 case OPC_SUBSUU_S_df
:
18481 gen_helper_msa_subsuu_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18484 gen_helper_msa_pckod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18487 gen_helper_msa_bset_df(cpu_env
, tdf
, twd
, tws
, twt
);
18490 gen_helper_msa_min_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18493 gen_helper_msa_cle_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18496 gen_helper_msa_ave_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18498 case OPC_ASUB_S_df
:
18499 gen_helper_msa_asub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18502 gen_helper_msa_div_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18505 gen_helper_msa_ilvl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18508 gen_helper_msa_bneg_df(cpu_env
, tdf
, twd
, tws
, twt
);
18511 gen_helper_msa_min_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18514 gen_helper_msa_cle_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18517 gen_helper_msa_ave_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18519 case OPC_ASUB_U_df
:
18520 gen_helper_msa_asub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18523 gen_helper_msa_div_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18526 gen_helper_msa_ilvr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18529 gen_helper_msa_binsl_df(cpu_env
, tdf
, twd
, tws
, twt
);
18532 gen_helper_msa_max_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18534 case OPC_AVER_S_df
:
18535 gen_helper_msa_aver_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18538 gen_helper_msa_mod_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18541 gen_helper_msa_ilvev_df(cpu_env
, tdf
, twd
, tws
, twt
);
18544 gen_helper_msa_binsr_df(cpu_env
, tdf
, twd
, tws
, twt
);
18547 gen_helper_msa_min_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18549 case OPC_AVER_U_df
:
18550 gen_helper_msa_aver_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18553 gen_helper_msa_mod_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18556 gen_helper_msa_ilvod_df(cpu_env
, tdf
, twd
, tws
, twt
);
18559 case OPC_DOTP_S_df
:
18560 case OPC_DOTP_U_df
:
18561 case OPC_DPADD_S_df
:
18562 case OPC_DPADD_U_df
:
18563 case OPC_DPSUB_S_df
:
18564 case OPC_HADD_S_df
:
18565 case OPC_DPSUB_U_df
:
18566 case OPC_HADD_U_df
:
18567 case OPC_HSUB_S_df
:
18568 case OPC_HSUB_U_df
:
18569 if (df
== DF_BYTE
) {
18570 generate_exception_end(ctx
, EXCP_RI
);
18573 switch (MASK_MSA_3R(ctx
->opcode
)) {
18574 case OPC_DOTP_S_df
:
18575 gen_helper_msa_dotp_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18577 case OPC_DOTP_U_df
:
18578 gen_helper_msa_dotp_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18580 case OPC_DPADD_S_df
:
18581 gen_helper_msa_dpadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18583 case OPC_DPADD_U_df
:
18584 gen_helper_msa_dpadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18586 case OPC_DPSUB_S_df
:
18587 gen_helper_msa_dpsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18589 case OPC_HADD_S_df
:
18590 gen_helper_msa_hadd_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18592 case OPC_DPSUB_U_df
:
18593 gen_helper_msa_dpsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18595 case OPC_HADD_U_df
:
18596 gen_helper_msa_hadd_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18598 case OPC_HSUB_S_df
:
18599 gen_helper_msa_hsub_s_df(cpu_env
, tdf
, twd
, tws
, twt
);
18601 case OPC_HSUB_U_df
:
18602 gen_helper_msa_hsub_u_df(cpu_env
, tdf
, twd
, tws
, twt
);
18607 MIPS_INVAL("MSA instruction");
18608 generate_exception_end(ctx
, EXCP_RI
);
18611 tcg_temp_free_i32(twd
);
18612 tcg_temp_free_i32(tws
);
18613 tcg_temp_free_i32(twt
);
18614 tcg_temp_free_i32(tdf
);
18617 static void gen_msa_elm_3e(CPUMIPSState
*env
, DisasContext
*ctx
)
18619 #define MASK_MSA_ELM_DF3E(op) (MASK_MSA_MINOR(op) | (op & (0x3FF << 16)))
18620 uint8_t source
= (ctx
->opcode
>> 11) & 0x1f;
18621 uint8_t dest
= (ctx
->opcode
>> 6) & 0x1f;
18622 TCGv telm
= tcg_temp_new();
18623 TCGv_i32 tsr
= tcg_const_i32(source
);
18624 TCGv_i32 tdt
= tcg_const_i32(dest
);
18626 switch (MASK_MSA_ELM_DF3E(ctx
->opcode
)) {
18628 gen_load_gpr(telm
, source
);
18629 gen_helper_msa_ctcmsa(cpu_env
, telm
, tdt
);
18632 gen_helper_msa_cfcmsa(telm
, cpu_env
, tsr
);
18633 gen_store_gpr(telm
, dest
);
18636 gen_helper_msa_move_v(cpu_env
, tdt
, tsr
);
18639 MIPS_INVAL("MSA instruction");
18640 generate_exception_end(ctx
, EXCP_RI
);
18644 tcg_temp_free(telm
);
18645 tcg_temp_free_i32(tdt
);
18646 tcg_temp_free_i32(tsr
);
18649 static void gen_msa_elm_df(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t df
,
18652 #define MASK_MSA_ELM(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18653 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18654 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18656 TCGv_i32 tws
= tcg_const_i32(ws
);
18657 TCGv_i32 twd
= tcg_const_i32(wd
);
18658 TCGv_i32 tn
= tcg_const_i32(n
);
18659 TCGv_i32 tdf
= tcg_const_i32(df
);
18661 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18663 gen_helper_msa_sldi_df(cpu_env
, tdf
, twd
, tws
, tn
);
18665 case OPC_SPLATI_df
:
18666 gen_helper_msa_splati_df(cpu_env
, tdf
, twd
, tws
, tn
);
18669 gen_helper_msa_insve_df(cpu_env
, tdf
, twd
, tws
, tn
);
18671 case OPC_COPY_S_df
:
18672 case OPC_COPY_U_df
:
18673 case OPC_INSERT_df
:
18674 #if !defined(TARGET_MIPS64)
18675 /* Double format valid only for MIPS64 */
18676 if (df
== DF_DOUBLE
) {
18677 generate_exception_end(ctx
, EXCP_RI
);
18681 switch (MASK_MSA_ELM(ctx
->opcode
)) {
18682 case OPC_COPY_S_df
:
18683 gen_helper_msa_copy_s_df(cpu_env
, tdf
, twd
, tws
, tn
);
18685 case OPC_COPY_U_df
:
18686 gen_helper_msa_copy_u_df(cpu_env
, tdf
, twd
, tws
, tn
);
18688 case OPC_INSERT_df
:
18689 gen_helper_msa_insert_df(cpu_env
, tdf
, twd
, tws
, tn
);
18694 MIPS_INVAL("MSA instruction");
18695 generate_exception_end(ctx
, EXCP_RI
);
18697 tcg_temp_free_i32(twd
);
18698 tcg_temp_free_i32(tws
);
18699 tcg_temp_free_i32(tn
);
18700 tcg_temp_free_i32(tdf
);
18703 static void gen_msa_elm(CPUMIPSState
*env
, DisasContext
*ctx
)
18705 uint8_t dfn
= (ctx
->opcode
>> 16) & 0x3f;
18706 uint32_t df
= 0, n
= 0;
18708 if ((dfn
& 0x30) == 0x00) {
18711 } else if ((dfn
& 0x38) == 0x20) {
18714 } else if ((dfn
& 0x3c) == 0x30) {
18717 } else if ((dfn
& 0x3e) == 0x38) {
18720 } else if (dfn
== 0x3E) {
18721 /* CTCMSA, CFCMSA, MOVE.V */
18722 gen_msa_elm_3e(env
, ctx
);
18725 generate_exception_end(ctx
, EXCP_RI
);
18729 gen_msa_elm_df(env
, ctx
, df
, n
);
18732 static void gen_msa_3rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18734 #define MASK_MSA_3RF(op) (MASK_MSA_MINOR(op) | (op & (0xf << 22)))
18735 uint8_t df
= (ctx
->opcode
>> 21) & 0x1;
18736 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18737 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18738 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18740 TCGv_i32 twd
= tcg_const_i32(wd
);
18741 TCGv_i32 tws
= tcg_const_i32(ws
);
18742 TCGv_i32 twt
= tcg_const_i32(wt
);
18743 TCGv_i32 tdf
= tcg_temp_new_i32();
18745 /* adjust df value for floating-point instruction */
18746 tcg_gen_movi_i32(tdf
, df
+ 2);
18748 switch (MASK_MSA_3RF(ctx
->opcode
)) {
18750 gen_helper_msa_fcaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18753 gen_helper_msa_fadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18756 gen_helper_msa_fcun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18759 gen_helper_msa_fsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18762 gen_helper_msa_fcor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18765 gen_helper_msa_fceq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18768 gen_helper_msa_fmul_df(cpu_env
, tdf
, twd
, tws
, twt
);
18771 gen_helper_msa_fcune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18774 gen_helper_msa_fcueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18777 gen_helper_msa_fdiv_df(cpu_env
, tdf
, twd
, tws
, twt
);
18780 gen_helper_msa_fcne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18783 gen_helper_msa_fclt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18786 gen_helper_msa_fmadd_df(cpu_env
, tdf
, twd
, tws
, twt
);
18789 tcg_gen_movi_i32(tdf
, df
+ 1);
18790 gen_helper_msa_mul_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18793 gen_helper_msa_fcult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18796 gen_helper_msa_fmsub_df(cpu_env
, tdf
, twd
, tws
, twt
);
18798 case OPC_MADD_Q_df
:
18799 tcg_gen_movi_i32(tdf
, df
+ 1);
18800 gen_helper_msa_madd_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18803 gen_helper_msa_fcle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18805 case OPC_MSUB_Q_df
:
18806 tcg_gen_movi_i32(tdf
, df
+ 1);
18807 gen_helper_msa_msub_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18810 gen_helper_msa_fcule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18813 gen_helper_msa_fexp2_df(cpu_env
, tdf
, twd
, tws
, twt
);
18816 gen_helper_msa_fsaf_df(cpu_env
, tdf
, twd
, tws
, twt
);
18819 gen_helper_msa_fexdo_df(cpu_env
, tdf
, twd
, tws
, twt
);
18822 gen_helper_msa_fsun_df(cpu_env
, tdf
, twd
, tws
, twt
);
18825 gen_helper_msa_fsor_df(cpu_env
, tdf
, twd
, tws
, twt
);
18828 gen_helper_msa_fseq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18831 gen_helper_msa_ftq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18834 gen_helper_msa_fsune_df(cpu_env
, tdf
, twd
, tws
, twt
);
18837 gen_helper_msa_fsueq_df(cpu_env
, tdf
, twd
, tws
, twt
);
18840 gen_helper_msa_fsne_df(cpu_env
, tdf
, twd
, tws
, twt
);
18843 gen_helper_msa_fslt_df(cpu_env
, tdf
, twd
, tws
, twt
);
18846 gen_helper_msa_fmin_df(cpu_env
, tdf
, twd
, tws
, twt
);
18848 case OPC_MULR_Q_df
:
18849 tcg_gen_movi_i32(tdf
, df
+ 1);
18850 gen_helper_msa_mulr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18853 gen_helper_msa_fsult_df(cpu_env
, tdf
, twd
, tws
, twt
);
18855 case OPC_FMIN_A_df
:
18856 gen_helper_msa_fmin_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18858 case OPC_MADDR_Q_df
:
18859 tcg_gen_movi_i32(tdf
, df
+ 1);
18860 gen_helper_msa_maddr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18863 gen_helper_msa_fsle_df(cpu_env
, tdf
, twd
, tws
, twt
);
18866 gen_helper_msa_fmax_df(cpu_env
, tdf
, twd
, tws
, twt
);
18868 case OPC_MSUBR_Q_df
:
18869 tcg_gen_movi_i32(tdf
, df
+ 1);
18870 gen_helper_msa_msubr_q_df(cpu_env
, tdf
, twd
, tws
, twt
);
18873 gen_helper_msa_fsule_df(cpu_env
, tdf
, twd
, tws
, twt
);
18875 case OPC_FMAX_A_df
:
18876 gen_helper_msa_fmax_a_df(cpu_env
, tdf
, twd
, tws
, twt
);
18879 MIPS_INVAL("MSA instruction");
18880 generate_exception_end(ctx
, EXCP_RI
);
18884 tcg_temp_free_i32(twd
);
18885 tcg_temp_free_i32(tws
);
18886 tcg_temp_free_i32(twt
);
18887 tcg_temp_free_i32(tdf
);
18890 static void gen_msa_2r(CPUMIPSState
*env
, DisasContext
*ctx
)
18892 #define MASK_MSA_2R(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18893 (op & (0x7 << 18)))
18894 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18895 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18896 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18897 uint8_t df
= (ctx
->opcode
>> 16) & 0x3;
18898 TCGv_i32 twd
= tcg_const_i32(wd
);
18899 TCGv_i32 tws
= tcg_const_i32(ws
);
18900 TCGv_i32 twt
= tcg_const_i32(wt
);
18901 TCGv_i32 tdf
= tcg_const_i32(df
);
18903 switch (MASK_MSA_2R(ctx
->opcode
)) {
18905 #if !defined(TARGET_MIPS64)
18906 /* Double format valid only for MIPS64 */
18907 if (df
== DF_DOUBLE
) {
18908 generate_exception_end(ctx
, EXCP_RI
);
18912 gen_helper_msa_fill_df(cpu_env
, tdf
, twd
, tws
); /* trs */
18915 gen_helper_msa_pcnt_df(cpu_env
, tdf
, twd
, tws
);
18918 gen_helper_msa_nloc_df(cpu_env
, tdf
, twd
, tws
);
18921 gen_helper_msa_nlzc_df(cpu_env
, tdf
, twd
, tws
);
18924 MIPS_INVAL("MSA instruction");
18925 generate_exception_end(ctx
, EXCP_RI
);
18929 tcg_temp_free_i32(twd
);
18930 tcg_temp_free_i32(tws
);
18931 tcg_temp_free_i32(twt
);
18932 tcg_temp_free_i32(tdf
);
18935 static void gen_msa_2rf(CPUMIPSState
*env
, DisasContext
*ctx
)
18937 #define MASK_MSA_2RF(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)) | \
18938 (op & (0xf << 17)))
18939 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
18940 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
18941 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
18942 uint8_t df
= (ctx
->opcode
>> 16) & 0x1;
18943 TCGv_i32 twd
= tcg_const_i32(wd
);
18944 TCGv_i32 tws
= tcg_const_i32(ws
);
18945 TCGv_i32 twt
= tcg_const_i32(wt
);
18946 /* adjust df value for floating-point instruction */
18947 TCGv_i32 tdf
= tcg_const_i32(df
+ 2);
18949 switch (MASK_MSA_2RF(ctx
->opcode
)) {
18950 case OPC_FCLASS_df
:
18951 gen_helper_msa_fclass_df(cpu_env
, tdf
, twd
, tws
);
18953 case OPC_FTRUNC_S_df
:
18954 gen_helper_msa_ftrunc_s_df(cpu_env
, tdf
, twd
, tws
);
18956 case OPC_FTRUNC_U_df
:
18957 gen_helper_msa_ftrunc_u_df(cpu_env
, tdf
, twd
, tws
);
18960 gen_helper_msa_fsqrt_df(cpu_env
, tdf
, twd
, tws
);
18962 case OPC_FRSQRT_df
:
18963 gen_helper_msa_frsqrt_df(cpu_env
, tdf
, twd
, tws
);
18966 gen_helper_msa_frcp_df(cpu_env
, tdf
, twd
, tws
);
18969 gen_helper_msa_frint_df(cpu_env
, tdf
, twd
, tws
);
18972 gen_helper_msa_flog2_df(cpu_env
, tdf
, twd
, tws
);
18974 case OPC_FEXUPL_df
:
18975 gen_helper_msa_fexupl_df(cpu_env
, tdf
, twd
, tws
);
18977 case OPC_FEXUPR_df
:
18978 gen_helper_msa_fexupr_df(cpu_env
, tdf
, twd
, tws
);
18981 gen_helper_msa_ffql_df(cpu_env
, tdf
, twd
, tws
);
18984 gen_helper_msa_ffqr_df(cpu_env
, tdf
, twd
, tws
);
18986 case OPC_FTINT_S_df
:
18987 gen_helper_msa_ftint_s_df(cpu_env
, tdf
, twd
, tws
);
18989 case OPC_FTINT_U_df
:
18990 gen_helper_msa_ftint_u_df(cpu_env
, tdf
, twd
, tws
);
18992 case OPC_FFINT_S_df
:
18993 gen_helper_msa_ffint_s_df(cpu_env
, tdf
, twd
, tws
);
18995 case OPC_FFINT_U_df
:
18996 gen_helper_msa_ffint_u_df(cpu_env
, tdf
, twd
, tws
);
19000 tcg_temp_free_i32(twd
);
19001 tcg_temp_free_i32(tws
);
19002 tcg_temp_free_i32(twt
);
19003 tcg_temp_free_i32(tdf
);
19006 static void gen_msa_vec_v(CPUMIPSState
*env
, DisasContext
*ctx
)
19008 #define MASK_MSA_VEC(op) (MASK_MSA_MINOR(op) | (op & (0x1f << 21)))
19009 uint8_t wt
= (ctx
->opcode
>> 16) & 0x1f;
19010 uint8_t ws
= (ctx
->opcode
>> 11) & 0x1f;
19011 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19012 TCGv_i32 twd
= tcg_const_i32(wd
);
19013 TCGv_i32 tws
= tcg_const_i32(ws
);
19014 TCGv_i32 twt
= tcg_const_i32(wt
);
19016 switch (MASK_MSA_VEC(ctx
->opcode
)) {
19018 gen_helper_msa_and_v(cpu_env
, twd
, tws
, twt
);
19021 gen_helper_msa_or_v(cpu_env
, twd
, tws
, twt
);
19024 gen_helper_msa_nor_v(cpu_env
, twd
, tws
, twt
);
19027 gen_helper_msa_xor_v(cpu_env
, twd
, tws
, twt
);
19030 gen_helper_msa_bmnz_v(cpu_env
, twd
, tws
, twt
);
19033 gen_helper_msa_bmz_v(cpu_env
, twd
, tws
, twt
);
19036 gen_helper_msa_bsel_v(cpu_env
, twd
, tws
, twt
);
19039 MIPS_INVAL("MSA instruction");
19040 generate_exception_end(ctx
, EXCP_RI
);
19044 tcg_temp_free_i32(twd
);
19045 tcg_temp_free_i32(tws
);
19046 tcg_temp_free_i32(twt
);
19049 static void gen_msa_vec(CPUMIPSState
*env
, DisasContext
*ctx
)
19051 switch (MASK_MSA_VEC(ctx
->opcode
)) {
19059 gen_msa_vec_v(env
, ctx
);
19062 gen_msa_2r(env
, ctx
);
19065 gen_msa_2rf(env
, ctx
);
19068 MIPS_INVAL("MSA instruction");
19069 generate_exception_end(ctx
, EXCP_RI
);
19074 static void gen_msa(CPUMIPSState
*env
, DisasContext
*ctx
)
19076 uint32_t opcode
= ctx
->opcode
;
19077 check_insn(ctx
, ASE_MSA
);
19078 check_msa_access(ctx
);
19080 switch (MASK_MSA_MINOR(opcode
)) {
19081 case OPC_MSA_I8_00
:
19082 case OPC_MSA_I8_01
:
19083 case OPC_MSA_I8_02
:
19084 gen_msa_i8(env
, ctx
);
19086 case OPC_MSA_I5_06
:
19087 case OPC_MSA_I5_07
:
19088 gen_msa_i5(env
, ctx
);
19090 case OPC_MSA_BIT_09
:
19091 case OPC_MSA_BIT_0A
:
19092 gen_msa_bit(env
, ctx
);
19094 case OPC_MSA_3R_0D
:
19095 case OPC_MSA_3R_0E
:
19096 case OPC_MSA_3R_0F
:
19097 case OPC_MSA_3R_10
:
19098 case OPC_MSA_3R_11
:
19099 case OPC_MSA_3R_12
:
19100 case OPC_MSA_3R_13
:
19101 case OPC_MSA_3R_14
:
19102 case OPC_MSA_3R_15
:
19103 gen_msa_3r(env
, ctx
);
19106 gen_msa_elm(env
, ctx
);
19108 case OPC_MSA_3RF_1A
:
19109 case OPC_MSA_3RF_1B
:
19110 case OPC_MSA_3RF_1C
:
19111 gen_msa_3rf(env
, ctx
);
19114 gen_msa_vec(env
, ctx
);
19125 int32_t s10
= sextract32(ctx
->opcode
, 16, 10);
19126 uint8_t rs
= (ctx
->opcode
>> 11) & 0x1f;
19127 uint8_t wd
= (ctx
->opcode
>> 6) & 0x1f;
19128 uint8_t df
= (ctx
->opcode
>> 0) & 0x3;
19130 TCGv_i32 twd
= tcg_const_i32(wd
);
19131 TCGv taddr
= tcg_temp_new();
19132 gen_base_offset_addr(ctx
, taddr
, rs
, s10
<< df
);
19134 switch (MASK_MSA_MINOR(opcode
)) {
19136 gen_helper_msa_ld_b(cpu_env
, twd
, taddr
);
19139 gen_helper_msa_ld_h(cpu_env
, twd
, taddr
);
19142 gen_helper_msa_ld_w(cpu_env
, twd
, taddr
);
19145 gen_helper_msa_ld_d(cpu_env
, twd
, taddr
);
19148 gen_helper_msa_st_b(cpu_env
, twd
, taddr
);
19151 gen_helper_msa_st_h(cpu_env
, twd
, taddr
);
19154 gen_helper_msa_st_w(cpu_env
, twd
, taddr
);
19157 gen_helper_msa_st_d(cpu_env
, twd
, taddr
);
19161 tcg_temp_free_i32(twd
);
19162 tcg_temp_free(taddr
);
19166 MIPS_INVAL("MSA instruction");
19167 generate_exception_end(ctx
, EXCP_RI
);
19173 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
19176 int rs
, rt
, rd
, sa
;
19180 /* make sure instructions are on a word boundary */
19181 if (ctx
->pc
& 0x3) {
19182 env
->CP0_BadVAddr
= ctx
->pc
;
19183 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
19187 /* Handle blikely not taken case */
19188 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
19189 TCGLabel
*l1
= gen_new_label();
19191 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
19192 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
19193 gen_goto_tb(ctx
, 1, ctx
->pc
+ 4);
    op = MASK_OP_MAJOR(ctx->opcode);
    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;
    sa = (ctx->opcode >> 6) & 0x1f;
    imm = (int16_t)ctx->opcode;
        decode_opc_special(env, ctx);
        decode_opc_special2_legacy(env, ctx);
        decode_opc_special3(env, ctx);
        op1 = MASK_REGIMM(ctx->opcode);
        case OPC_BLTZL: /* REGIMM branches */
            check_insn(ctx, ISA_MIPS2);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                    /* OPC_NAL, OPC_BAL */
                    gen_compute_branch(ctx, op1, 4, 0, -1, imm << 2, 4);
                    generate_exception_end(ctx, EXCP_RI);
                gen_compute_branch(ctx, op1, 4, rs, -1, imm << 2, 4);
        case OPC_TGEI ... OPC_TEQI: /* REGIMM traps */
            check_insn(ctx, ISA_MIPS2);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_trap(ctx, op1, rs, -1, imm);
            check_insn(ctx, ISA_MIPS32R6);
            generate_exception_end(ctx, EXCP_RI);
            check_insn(ctx, ISA_MIPS32R2);
            /* Break the TB to be able to sync copied instructions
               immediately */
            ctx->bstate = BS_STOP;
        case OPC_BPOSGE32: /* MIPS DSP branch */
#if defined(TARGET_MIPS64)
            gen_compute_branch(ctx, op1, 4, -1, -2, (int32_t)imm << 2, 4);
#if defined(TARGET_MIPS64)
            check_insn(ctx, ISA_MIPS32R6);
            check_mips_64(ctx);
                tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 32);
            check_insn(ctx, ISA_MIPS32R6);
            check_mips_64(ctx);
                tcg_gen_addi_tl(cpu_gpr[rs], cpu_gpr[rs], (int64_t)imm << 48);
        default: /* Invalid */
            MIPS_INVAL("regimm");
            generate_exception_end(ctx, EXCP_RI);
        check_cp0_enabled(ctx);
        op1 = MASK_CP0(ctx->opcode);
#if defined(TARGET_MIPS64)
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, op1, rt, rd);
#endif /* !CONFIG_USER_ONLY */
        case OPC_C0_FIRST ... OPC_C0_LAST:
#ifndef CONFIG_USER_ONLY
            gen_cp0(env, ctx, MASK_C0(ctx->opcode), rt, rd);
#endif /* !CONFIG_USER_ONLY */
#ifndef CONFIG_USER_ONLY
                TCGv t0 = tcg_temp_new();

                op2 = MASK_MFMC0(ctx->opcode);
                    check_insn(ctx, ASE_MT);
                    gen_helper_dmt(t0);
                    gen_store_gpr(t0, rt);
                    check_insn(ctx, ASE_MT);
                    gen_helper_emt(t0);
                    gen_store_gpr(t0, rt);
                    check_insn(ctx, ASE_MT);
                    gen_helper_dvpe(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    check_insn(ctx, ASE_MT);
                    gen_helper_evpe(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    check_insn(ctx, ISA_MIPS32R6);
                        gen_helper_dvp(t0, cpu_env);
                        gen_store_gpr(t0, rt);
                    check_insn(ctx, ISA_MIPS32R6);
                        gen_helper_evp(t0, cpu_env);
                        gen_store_gpr(t0, rt);
                    check_insn(ctx, ISA_MIPS32R2);
                    save_cpu_state(ctx, 1);
                    gen_helper_di(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    /* Stop translation as we may have switched
                       the execution mode. */
                    ctx->bstate = BS_STOP;
                    check_insn(ctx, ISA_MIPS32R2);
                    save_cpu_state(ctx, 1);
                    gen_helper_ei(t0, cpu_env);
                    gen_store_gpr(t0, rt);
                    /* Stop translation as we may have switched
                       the execution mode. */
                    ctx->bstate = BS_STOP;
                default: /* Invalid */
                    MIPS_INVAL("mfmc0");
                    generate_exception_end(ctx, EXCP_RI);
#endif /* !CONFIG_USER_ONLY */
            check_insn(ctx, ISA_MIPS32R2);
            gen_load_srsgpr(rt, rd);
            check_insn(ctx, ISA_MIPS32R2);
            gen_store_srsgpr(rt, rd);
            generate_exception_end(ctx, EXCP_RI);
    case OPC_BOVC: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            /* Arithmetic with immediate opcode */
            gen_arith_imm(ctx, op, rt, rs, imm);
        gen_arith_imm(ctx, op, rt, rs, imm);
    case OPC_SLTI: /* Set on less than with immediate opcode */
        gen_slt_imm(ctx, op, rt, rs, imm);
    case OPC_ANDI: /* Arithmetic with immediate opcode */
    case OPC_LUI: /* OPC_AUI */
        gen_logic_imm(ctx, op, rt, rs, imm);
    case OPC_J ... OPC_JAL: /* Jump */
        offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
        gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
    case OPC_BLEZC: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
                generate_exception_end(ctx, EXCP_RI);
            /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
    case OPC_BGTZC: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
        if (ctx->insn_flags & ISA_MIPS32R6) {
                generate_exception_end(ctx, EXCP_RI);
            /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
    case OPC_BLEZALC: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
    case OPC_BGTZALC: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
            gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
            check_insn(ctx, ISA_MIPS32R6);
            /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_compute_branch(ctx, op, 4, rs, rt, imm << 2, 4);
    case OPC_LL: /* Load and stores */
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
    case OPC_LB ... OPC_LH:
    case OPC_LW ... OPC_LHU:
        gen_ld(ctx, op, rt, rs, imm);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
    case OPC_SB ... OPC_SH:
        gen_st(ctx, op, rt, rs, imm);
        check_insn(ctx, ISA_MIPS2);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        gen_st_cond(ctx, op, rt, rs, imm);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_cp0_enabled(ctx);
        check_insn(ctx, ISA_MIPS3 | ISA_MIPS32);
        if (ctx->hflags & MIPS_HFLAG_ITC_CACHE) {
            gen_cache_operation(ctx, rt, rs, imm);
        /* Treat as NOP. */
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS4 | ISA_MIPS32);
        /* Treat as NOP. */
        /* Floating point (COP1). */
            gen_cop1_ldst(ctx, op, rt, rs, imm);
        op1 = MASK_CP1(ctx->opcode);
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R2);
            check_cp1_enabled(ctx);
            gen_cp1(ctx, op1, rt, rd);
#if defined(TARGET_MIPS64)
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_cp1(ctx, op1, rt, rd);
        case OPC_BC1EQZ: /* OPC_BC1ANY2 */
            check_cp1_enabled(ctx);
            if (ctx->insn_flags & ISA_MIPS32R6) {
                gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
                check_insn(ctx, ASE_MIPS3D);
                gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                    (rt >> 2) & 0x7, imm << 2);
            check_cp1_enabled(ctx);
            check_insn(ctx, ISA_MIPS32R6);
            gen_compute_branch1_r6(ctx, MASK_CP1(ctx->opcode),
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            check_insn(ctx, ASE_MIPS3D);
            check_cp1_enabled(ctx);
            check_insn_opc_removed(ctx, ISA_MIPS32R6);
            gen_compute_branch1(ctx, MASK_BC1(ctx->opcode),
                                (rt >> 2) & 0x7, imm << 2);
            check_cp1_enabled(ctx);
            gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
                int r6_op = ctx->opcode & FOP(0x3f, 0x1f);
                check_cp1_enabled(ctx);
                if (ctx->insn_flags & ISA_MIPS32R6) {
                    case R6_OPC_CMP_AF_S:
                    case R6_OPC_CMP_UN_S:
                    case R6_OPC_CMP_EQ_S:
                    case R6_OPC_CMP_UEQ_S:
                    case R6_OPC_CMP_LT_S:
                    case R6_OPC_CMP_ULT_S:
                    case R6_OPC_CMP_LE_S:
                    case R6_OPC_CMP_ULE_S:
                    case R6_OPC_CMP_SAF_S:
                    case R6_OPC_CMP_SUN_S:
                    case R6_OPC_CMP_SEQ_S:
                    case R6_OPC_CMP_SEUQ_S:
                    case R6_OPC_CMP_SLT_S:
                    case R6_OPC_CMP_SULT_S:
                    case R6_OPC_CMP_SLE_S:
                    case R6_OPC_CMP_SULE_S:
                    case R6_OPC_CMP_OR_S:
                    case R6_OPC_CMP_UNE_S:
                    case R6_OPC_CMP_NE_S:
                    case R6_OPC_CMP_SOR_S:
                    case R6_OPC_CMP_SUNE_S:
                    case R6_OPC_CMP_SNE_S:
                        gen_r6_cmp_s(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                    case R6_OPC_CMP_AF_D:
                    case R6_OPC_CMP_UN_D:
                    case R6_OPC_CMP_EQ_D:
                    case R6_OPC_CMP_UEQ_D:
                    case R6_OPC_CMP_LT_D:
                    case R6_OPC_CMP_ULT_D:
                    case R6_OPC_CMP_LE_D:
                    case R6_OPC_CMP_ULE_D:
                    case R6_OPC_CMP_SAF_D:
                    case R6_OPC_CMP_SUN_D:
                    case R6_OPC_CMP_SEQ_D:
                    case R6_OPC_CMP_SEUQ_D:
                    case R6_OPC_CMP_SLT_D:
                    case R6_OPC_CMP_SULT_D:
                    case R6_OPC_CMP_SLE_D:
                    case R6_OPC_CMP_SULE_D:
                    case R6_OPC_CMP_OR_D:
                    case R6_OPC_CMP_UNE_D:
                    case R6_OPC_CMP_NE_D:
                    case R6_OPC_CMP_SOR_D:
                    case R6_OPC_CMP_SUNE_D:
                    case R6_OPC_CMP_SNE_D:
                        gen_r6_cmp_d(ctx, ctx->opcode & 0x1f, rt, rd, sa);
                    gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f),
                               rt, rd, sa, (imm >> 8) & 0x7);
            gen_farith(ctx, ctx->opcode & FOP(0x3f, 0x1f), rt, rd, sa,
            check_insn(ctx, ASE_MSA);
            gen_msa_branch(env, ctx, op1);
            generate_exception_end(ctx, EXCP_RI);

    /* Compact branches [R6] and COP2 [non-R6] */
    case OPC_BC: /* OPC_LWC2 */
    case OPC_BALC: /* OPC_SWC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BC, OPC_BALC */
            gen_compute_compact_branch(ctx, op, 0, 0,
                                       sextract32(ctx->opcode << 2, 0, 28));
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
    case OPC_BEQZC: /* OPC_JIC, OPC_LDC2 */
    case OPC_BNEZC: /* OPC_JIALC, OPC_SDC2 */
        if (ctx->insn_flags & ISA_MIPS32R6) {
                /* OPC_BEQZC, OPC_BNEZC */
                gen_compute_compact_branch(ctx, op, rs, 0,
                                           sextract32(ctx->opcode << 2, 0, 23));
                /* OPC_JIC, OPC_JIALC */
                gen_compute_compact_branch(ctx, op, 0, rt, imm);
            /* OPC_LWC2, OPC_SWC2 */
            /* COP2: Not implemented. */
            generate_exception_err(ctx, EXCP_CpU, 2);
        check_insn(ctx, INSN_LOONGSON2F);
        /* Note that these instructions use different fields. */
        gen_loongson_multimedia(ctx, sa, rd, rt);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        if (ctx->CP0_Config1 & (1 << CP0C1_FP)) {
            check_cp1_enabled(ctx);
            op1 = MASK_CP3(ctx->opcode);
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_ldst(ctx, op1, sa, rd, rs, rt);
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                /* Treat as NOP. */
                check_insn(ctx, ISA_MIPS5 | ISA_MIPS32R2);
                check_insn(ctx, ISA_MIPS4 | ISA_MIPS32R2);
                gen_flt3_arith(ctx, op1, sa, rs, rd, rt);
                generate_exception_end(ctx, EXCP_RI);
            generate_exception_err(ctx, EXCP_CpU, 1);
#if defined(TARGET_MIPS64)
    /* MIPS64 opcodes */
    case OPC_LDL ... OPC_LDR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_ld(ctx, op, rt, rs, imm);
    case OPC_SDL ... OPC_SDR:
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st(ctx, op, rt, rs, imm);
        check_insn_opc_removed(ctx, ISA_MIPS32R6);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_st_cond(ctx, op, rt, rs, imm);
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            check_insn(ctx, ISA_MIPS3);
            check_mips_64(ctx);
            gen_arith_imm(ctx, op, rt, rs, imm);
        check_insn(ctx, ISA_MIPS3);
        check_mips_64(ctx);
        gen_arith_imm(ctx, op, rt, rs, imm);
    case OPC_BNVC: /* OPC_BNEZALC, OPC_BNEC */
        if (ctx->insn_flags & ISA_MIPS32R6) {
            gen_compute_compact_branch(ctx, op, rs, rt, imm << 2);
            MIPS_INVAL("major opcode");
            generate_exception_end(ctx, EXCP_RI);
    case OPC_DAUI: /* OPC_JALX */
        if (ctx->insn_flags & ISA_MIPS32R6) {
#if defined(TARGET_MIPS64)
                check_mips_64(ctx);
                generate_exception(ctx, EXCP_RI);
            } else if (rt != 0) {
                TCGv t0 = tcg_temp_new();
                gen_load_gpr(t0, rs);
                tcg_gen_addi_tl(cpu_gpr[rt], t0, imm << 16);
            generate_exception_end(ctx, EXCP_RI);
            MIPS_INVAL("major opcode");
            check_insn(ctx, ASE_MIPS16 | ASE_MICROMIPS);
            offset = (int32_t)(ctx->opcode & 0x3FFFFFF) << 2;
            gen_compute_branch(ctx, op, 4, rs, rt, offset, 4);
    case OPC_MSA: /* OPC_MDMX */
        /* MDMX: Not implemented. */
        check_insn(ctx, ISA_MIPS32R6);
        gen_pcrel(ctx, ctx->opcode, ctx->pc, rs);
    default: /* Invalid */
        MIPS_INVAL("major opcode");
        generate_exception_end(ctx, EXCP_RI);
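/*
 * decode_opc() dispatches on the major opcode (bits 31:26, see
 * MASK_OP_MAJOR).  Several major opcodes are shared between pre-R6 and
 * R6 encodings (e.g. the compact-branch families noted in the case
 * comments above), which is why many cases test
 * ctx->insn_flags & ISA_MIPS32R6 before choosing between
 * gen_compute_branch() and gen_compute_compact_branch().
 */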
void gen_intermediate_code(CPUMIPSState *env, struct TranslationBlock *tb)
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);
    target_ulong pc_start;
    target_ulong next_page_start;
    next_page_start = (pc_start & TARGET_PAGE_MASK) + TARGET_PAGE_SIZE;
    ctx.singlestep_enabled = cs->singlestep_enabled;
    ctx.insn_flags = env->insn_flags;
    ctx.CP0_Config1 = env->CP0_Config1;
    ctx.bstate = BS_NONE;
    ctx.kscrexist = (env->CP0_Config4 >> CP0C4_KScrExist) & 0xff;
    ctx.rxi = (env->CP0_Config3 >> CP0C3_RXI) & 1;
    ctx.ie = (env->CP0_Config4 >> CP0C4_IE) & 3;
    ctx.bi = (env->CP0_Config3 >> CP0C3_BI) & 1;
    ctx.bp = (env->CP0_Config3 >> CP0C3_BP) & 1;
    ctx.PAMask = env->PAMask;
    ctx.mvh = (env->CP0_Config5 >> CP0C5_MVH) & 1;
    ctx.CP0_LLAddr_shift = env->CP0_LLAddr_shift;
    ctx.cmgcr = (env->CP0_Config3 >> CP0C3_CMGCR) & 1;
    /* Restore delay slot state from the tb context. */
    ctx.hflags = (uint32_t)tb->flags; /* FIXME: maybe use 64 bits here? */
    ctx.ulri = (env->CP0_Config3 >> CP0C3_ULRI) & 1;
    ctx.ps = ((env->active_fpu.fcr0 >> FCR0_PS) & 1) ||
             (env->insn_flags & (INSN_LOONGSON2E | INSN_LOONGSON2F));
    ctx.vp = (env->CP0_Config5 >> CP0C5_VP) & 1;
    ctx.mrp = (env->CP0_Config5 >> CP0C5_MRP) & 1;
    ctx.nan2008 = (env->active_fpu.fcr31 >> FCR31_NAN2008) & 1;
    ctx.abs2008 = (env->active_fpu.fcr31 >> FCR31_ABS2008) & 1;
    restore_cpu_state(env, &ctx);
#ifdef CONFIG_USER_ONLY
    ctx.mem_idx = MIPS_HFLAG_UM;
    ctx.mem_idx = ctx.hflags & MIPS_HFLAG_KSU;
    ctx.default_tcg_memop_mask = (ctx.insn_flags & ISA_MIPS32R6) ?
                                 MO_UNALN : MO_ALIGN;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;
    LOG_DISAS("\ntb %p idx %d hflags %04x\n", tb, ctx.mem_idx, ctx.hflags);
    while (ctx.bstate == BS_NONE) {
        tcg_gen_insn_start(ctx.pc, ctx.hflags & MIPS_HFLAG_BMASK, ctx.btarget);
        if (unlikely(cpu_breakpoint_test(cs, ctx.pc, BP_ANY))) {
            save_cpu_state(&ctx, 1);
            ctx.bstate = BS_BRANCH;
            gen_helper_raise_exception_debug(cpu_env);
            /* The address covered by the breakpoint must be included in
               [tb->pc, tb->pc + tb->size) in order for it to be
               properly cleared -- thus we increment the PC here so that
               the logic setting tb->size below does the right thing. */
            goto done_generating;
        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
        is_slot = ctx.hflags & MIPS_HFLAG_BMASK;
        if (!(ctx.hflags & MIPS_HFLAG_M16)) {
            ctx.opcode = cpu_ldl_code(env, ctx.pc);
            decode_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MICROMIPS) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_micromips_opc(env, &ctx);
        } else if (ctx.insn_flags & ASE_MIPS16) {
            ctx.opcode = cpu_lduw_code(env, ctx.pc);
            insn_bytes = decode_mips16_opc(env, &ctx);
            generate_exception_end(&ctx, EXCP_RI);
        if (ctx.hflags & MIPS_HFLAG_BMASK) {
            if (!(ctx.hflags & (MIPS_HFLAG_BDS16 | MIPS_HFLAG_BDS32 |
                                MIPS_HFLAG_FBNSLOT))) {
                /* force to generate branch as there is neither delay nor
                   forbidden slot */
            if ((ctx.hflags & MIPS_HFLAG_M16) &&
                (ctx.hflags & MIPS_HFLAG_FBNSLOT)) {
                /* Force to generate branch as microMIPS R6 doesn't restrict
                   branches in the forbidden slot. */
            gen_branch(&ctx, insn_bytes);
        ctx.pc += insn_bytes;
        /* Execute a branch and its delay slot as a single instruction.
           This is what GDB expects and is consistent with what the
           hardware does (e.g. if a delay slot instruction faults, the
           reported PC is the PC of the branch). */
        if (cs->singlestep_enabled && (ctx.hflags & MIPS_HFLAG_BMASK) == 0) {
        if (ctx.pc >= next_page_start) {
        if (tcg_op_buf_full()) {
        if (num_insns >= max_insns)
    if (tb->cflags & CF_LAST_IO) {
    if (cs->singlestep_enabled && ctx.bstate != BS_BRANCH) {
        save_cpu_state(&ctx, ctx.bstate != BS_EXCP);
        gen_helper_raise_exception_debug(cpu_env);
        switch (ctx.bstate) {
            gen_goto_tb(&ctx, 0, ctx.pc);
            save_cpu_state(&ctx, 0);
            gen_goto_tb(&ctx, 0, ctx.pc);
            tcg_gen_exit_tb(0);
    gen_tb_end(tb, num_insns);
    tb->size = ctx.pc - pc_start;
    tb->icount = num_insns;
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)
        && qemu_log_in_addr_range(pc_start)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, ctx.pc - pc_start, 0);
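/*
 * The translation loop above runs while ctx.bstate == BS_NONE and stops
 * early on a breakpoint, when ctx.pc reaches next_page_start, when the
 * TCG opcode buffer fills up, when max_insns instructions have been
 * emitted, or in single-step mode; the epilogue then records tb->size
 * and tb->icount from what was actually translated.
 */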
static void fpu_dump_state(CPUMIPSState *env, FILE *f, fprintf_function fpu_fprintf,
                           int flags)
    int is_fpu64 = !!(env->hflags & MIPS_HFLAG_F64);

#define printfpr(fp) \
        fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
                    " fd:%13g fs:%13g psu: %13g\n", \
                    (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
                    (double)(fp)->fd, \
                    (double)(fp)->fs[FP_ENDIAN_IDX], \
                    (double)(fp)->fs[!FP_ENDIAN_IDX]); \
            tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
            tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
            fpu_fprintf(f, "w:%08x d:%016" PRIx64 \
                        " fd:%13g fs:%13g psu:%13g\n", \
                        tmp.w[FP_ENDIAN_IDX], tmp.d, \
                        (double)tmp.fs[FP_ENDIAN_IDX], \
                        (double)tmp.fs[!FP_ENDIAN_IDX]); \

    fpu_fprintf(f, "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
                env->active_fpu.fcr0, env->active_fpu.fcr31, is_fpu64,
                get_float_exception_flags(&env->active_fpu.fp_status));
    for (i = 0; i < 32; (is_fpu64) ? i++ : (i += 2)) {
        fpu_fprintf(f, "%3s: ", fregnames[i]);
        printfpr(&env->active_fpu.fpr[i]);
void mips_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
                         int flags)
    MIPSCPU *cpu = MIPS_CPU(cs);
    CPUMIPSState *env = &cpu->env;

    cpu_fprintf(f, "pc=0x" TARGET_FMT_lx " HI=0x" TARGET_FMT_lx
                " LO=0x" TARGET_FMT_lx " ds %04x "
                TARGET_FMT_lx " " TARGET_FMT_ld "\n",
                env->active_tc.PC, env->active_tc.HI[0], env->active_tc.LO[0],
                env->hflags, env->btarget, env->bcond);
    for (i = 0; i < 32; i++) {
        cpu_fprintf(f, "GPR%02d:", i);
        cpu_fprintf(f, " %s " TARGET_FMT_lx, regnames[i], env->active_tc.gpr[i]);
        cpu_fprintf(f, "\n");
    cpu_fprintf(f, "CP0 Status 0x%08x Cause 0x%08x EPC 0x" TARGET_FMT_lx "\n",
                env->CP0_Status, env->CP0_Cause, env->CP0_EPC);
    cpu_fprintf(f, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
                PRIx64 "\n",
                env->CP0_Config0, env->CP0_Config1, env->lladdr);
    cpu_fprintf(f, " Config2 0x%08x Config3 0x%08x\n",
                env->CP0_Config2, env->CP0_Config3);
    cpu_fprintf(f, " Config4 0x%08x Config5 0x%08x\n",
                env->CP0_Config4, env->CP0_Config5);
    if (env->hflags & MIPS_HFLAG_FPU)
        fpu_dump_state(env, f, cpu_fprintf, flags);
void mips_tcg_init(void)
    /* Initialize various static tables. */
    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");
    tcg_ctx.tcg_env = cpu_env;

    TCGV_UNUSED(cpu_gpr[0]);
    for (i = 1; i < 32; i++)
        cpu_gpr[i] = tcg_global_mem_new(cpu_env,
                                        offsetof(CPUMIPSState, active_tc.gpr[i]),
    for (i = 0; i < 32; i++) {
        int off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[0]);
            tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2]);
        /* The scalar floating-point unit (FPU) registers are mapped on
         * the MSA vector registers. */
        fpu_f64[i] = msa_wr_d[i * 2];
        off = offsetof(CPUMIPSState, active_fpu.fpr[i].wr.d[1]);
        msa_wr_d[i * 2 + 1] =
            tcg_global_mem_new_i64(cpu_env, off, msaregnames[i * 2 + 1]);

    cpu_PC = tcg_global_mem_new(cpu_env,
                                offsetof(CPUMIPSState, active_tc.PC), "PC");
    for (i = 0; i < MIPS_DSP_ACC; i++) {
        cpu_HI[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.HI[i]),
        cpu_LO[i] = tcg_global_mem_new(cpu_env,
                                       offsetof(CPUMIPSState, active_tc.LO[i]),
    cpu_dspctrl = tcg_global_mem_new(cpu_env,
                                     offsetof(CPUMIPSState, active_tc.DSPControl),
    bcond = tcg_global_mem_new(cpu_env,
                               offsetof(CPUMIPSState, bcond), "bcond");
    btarget = tcg_global_mem_new(cpu_env,
                                 offsetof(CPUMIPSState, btarget), "btarget");
    hflags = tcg_global_mem_new_i32(cpu_env,
                                    offsetof(CPUMIPSState, hflags), "hflags");
    fpu_fcr0 = tcg_global_mem_new_i32(cpu_env,
                                      offsetof(CPUMIPSState, active_fpu.fcr0),
    fpu_fcr31 = tcg_global_mem_new_i32(cpu_env,
                                       offsetof(CPUMIPSState, active_fpu.fcr31),

#include "translate_init.c"
MIPSCPU *cpu_mips_init(const char *cpu_model)
    const mips_def_t *def;

    def = cpu_mips_find_by_name(cpu_model);
    cpu = MIPS_CPU(object_new(TYPE_MIPS_CPU));
    env->cpu_model = def;
    env->exception_base = (int32_t)0xBFC00000;

#ifndef CONFIG_USER_ONLY
    mmu_init(env, def);
    fpu_init(env, def);
    mvp_init(env, def);

    object_property_set_bool(OBJECT(cpu), true, "realized", NULL);
bool cpu_supports_cps_smp(const char *cpu_model)
    const mips_def_t *def = cpu_mips_find_by_name(cpu_model);
    return (def->CP0_Config3 & (1 << CP0C3_CMGCR)) != 0;
void cpu_set_exception_base(int vp_index, target_ulong address)
    MIPSCPU *vp = MIPS_CPU(qemu_get_cpu(vp_index));
    vp->env.exception_base = address;
void cpu_state_reset(CPUMIPSState *env)
    MIPSCPU *cpu = mips_env_get_cpu(env);
    CPUState *cs = CPU(cpu);

    /* Reset registers to their default values */
    env->CP0_PRid = env->cpu_model->CP0_PRid;
    env->CP0_Config0 = env->cpu_model->CP0_Config0;
#ifdef TARGET_WORDS_BIGENDIAN
    env->CP0_Config0 |= (1 << CP0C0_BE);
    env->CP0_Config1 = env->cpu_model->CP0_Config1;
    env->CP0_Config2 = env->cpu_model->CP0_Config2;
    env->CP0_Config3 = env->cpu_model->CP0_Config3;
    env->CP0_Config4 = env->cpu_model->CP0_Config4;
    env->CP0_Config4_rw_bitmask = env->cpu_model->CP0_Config4_rw_bitmask;
    env->CP0_Config5 = env->cpu_model->CP0_Config5;
    env->CP0_Config5_rw_bitmask = env->cpu_model->CP0_Config5_rw_bitmask;
    env->CP0_Config6 = env->cpu_model->CP0_Config6;
    env->CP0_Config7 = env->cpu_model->CP0_Config7;
    env->CP0_LLAddr_rw_bitmask = env->cpu_model->CP0_LLAddr_rw_bitmask
                                 << env->cpu_model->CP0_LLAddr_shift;
    env->CP0_LLAddr_shift = env->cpu_model->CP0_LLAddr_shift;
    env->SYNCI_Step = env->cpu_model->SYNCI_Step;
    env->CCRes = env->cpu_model->CCRes;
    env->CP0_Status_rw_bitmask = env->cpu_model->CP0_Status_rw_bitmask;
    env->CP0_TCStatus_rw_bitmask = env->cpu_model->CP0_TCStatus_rw_bitmask;
    env->CP0_SRSCtl = env->cpu_model->CP0_SRSCtl;
    env->current_tc = 0;
    env->SEGBITS = env->cpu_model->SEGBITS;
    env->SEGMask = (target_ulong)((1ULL << env->cpu_model->SEGBITS) - 1);
#if defined(TARGET_MIPS64)
    if (env->cpu_model->insn_flags & ISA_MIPS3) {
        env->SEGMask |= 3ULL << 62;
    env->PABITS = env->cpu_model->PABITS;
    env->CP0_SRSConf0_rw_bitmask = env->cpu_model->CP0_SRSConf0_rw_bitmask;
    env->CP0_SRSConf0 = env->cpu_model->CP0_SRSConf0;
    env->CP0_SRSConf1_rw_bitmask = env->cpu_model->CP0_SRSConf1_rw_bitmask;
    env->CP0_SRSConf1 = env->cpu_model->CP0_SRSConf1;
    env->CP0_SRSConf2_rw_bitmask = env->cpu_model->CP0_SRSConf2_rw_bitmask;
    env->CP0_SRSConf2 = env->cpu_model->CP0_SRSConf2;
    env->CP0_SRSConf3_rw_bitmask = env->cpu_model->CP0_SRSConf3_rw_bitmask;
    env->CP0_SRSConf3 = env->cpu_model->CP0_SRSConf3;
    env->CP0_SRSConf4_rw_bitmask = env->cpu_model->CP0_SRSConf4_rw_bitmask;
    env->CP0_SRSConf4 = env->cpu_model->CP0_SRSConf4;
    env->CP0_PageGrain_rw_bitmask = env->cpu_model->CP0_PageGrain_rw_bitmask;
    env->CP0_PageGrain = env->cpu_model->CP0_PageGrain;
    env->active_fpu.fcr0 = env->cpu_model->CP1_fcr0;
    env->active_fpu.fcr31_rw_bitmask = env->cpu_model->CP1_fcr31_rw_bitmask;
    env->active_fpu.fcr31 = env->cpu_model->CP1_fcr31;
    env->msair = env->cpu_model->MSAIR;
    env->insn_flags = env->cpu_model->insn_flags;

#if defined(CONFIG_USER_ONLY)
    env->CP0_Status = (MIPS_HFLAG_UM << CP0St_KSU);
# ifdef TARGET_MIPS64
    /* Enable 64-bit register mode. */
    env->CP0_Status |= (1 << CP0St_PX);
# ifdef TARGET_ABI_MIPSN64
    /* Enable 64-bit address mode. */
    env->CP0_Status |= (1 << CP0St_UX);
    /* Enable access to the CPUNum, SYNCI_Step, CC, and CCRes RDHWR
       hardware registers. */
    env->CP0_HWREna |= 0x0000000F;
    if (env->CP0_Config1 & (1 << CP0C1_FP)) {
        env->CP0_Status |= (1 << CP0St_CU1);
    if (env->CP0_Config3 & (1 << CP0C3_DSPP)) {
        env->CP0_Status |= (1 << CP0St_MX);
# if defined(TARGET_MIPS64)
    /* For MIPS64, init FR bit to 1 if FPU unit is there and bit is writable. */
    if ((env->CP0_Config1 & (1 << CP0C1_FP)) &&
        (env->CP0_Status_rw_bitmask & (1 << CP0St_FR))) {
        env->CP0_Status |= (1 << CP0St_FR);
    if (env->hflags & MIPS_HFLAG_BMASK) {
        /* If the exception was raised from a delay slot,
           come back to the jump. */
        env->CP0_ErrorEPC = (env->active_tc.PC
                             - (env->hflags & MIPS_HFLAG_B16 ? 2 : 4));
        env->CP0_ErrorEPC = env->active_tc.PC;
    env->active_tc.PC = env->exception_base;
    env->CP0_Random = env->tlb->nb_tlb - 1;
    env->tlb->tlb_in_use = env->tlb->nb_tlb;
    env->CP0_Wired = 0;
    env->CP0_GlobalNumber = (cs->cpu_index & 0xFF) << CP0GN_VPId;
    env->CP0_EBase = (cs->cpu_index & 0x3FF);
    if (kvm_enabled()) {
        env->CP0_EBase |= 0x40000000;
        env->CP0_EBase |= 0x80000000;
    if (env->CP0_Config3 & (1 << CP0C3_CMGCR)) {
        env->CP0_CMGCRBase = 0x1fbf8000 >> 4;
    env->CP0_EntryHi_ASID_mask = (env->CP0_Config4 & (1 << CP0C4_AE)) ?
    env->CP0_Status = (1 << CP0St_BEV) | (1 << CP0St_ERL);
    /* vectored interrupts not implemented, timer on int 7,
       no performance counters. */
    env->CP0_IntCtl = 0xe0000000;
    for (i = 0; i < 7; i++) {
        env->CP0_WatchLo[i] = 0;
        env->CP0_WatchHi[i] = 0x80000000;
    env->CP0_WatchLo[7] = 0;
    env->CP0_WatchHi[7] = 0;
    /* Count register increments in debug mode, EJTAG version 1 */
    env->CP0_Debug = (1 << CP0DB_CNT) | (0x1 << CP0DB_VER);
    cpu_mips_store_count(env, 1);
    if (env->CP0_Config3 & (1 << CP0C3_MT)) {
        /* Only TC0 on VPE 0 starts as active. */
        for (i = 0; i < ARRAY_SIZE(env->tcs); i++) {
            env->tcs[i].CP0_TCBind = cs->cpu_index << CP0TCBd_CurVPE;
            env->tcs[i].CP0_TCHalt = 1;
        env->active_tc.CP0_TCHalt = 1;
        if (cs->cpu_index == 0) {
            /* VPE0 starts up enabled. */
            env->mvp->CP0_MVPControl |= (1 << CP0MVPCo_EVP);
            env->CP0_VPEConf0 |= (1 << CP0VPEC0_MVP) | (1 << CP0VPEC0_VPA);
            /* TC0 starts up unhalted. */
            env->active_tc.CP0_TCHalt = 0;
            env->tcs[0].CP0_TCHalt = 0;
            /* With thread 0 active. */
            env->active_tc.CP0_TCStatus = (1 << CP0TCSt_A);
            env->tcs[0].CP0_TCStatus = (1 << CP0TCSt_A);
    if ((env->insn_flags & ISA_MIPS32R6) &&
        (env->active_fpu.fcr0 & (1 << FCR0_F64))) {
        /* Status.FR = 0 mode in 64-bit FPU not allowed in R6 */
        env->CP0_Status |= (1 << CP0St_FR);
    if (env->CP0_Config3 & (1 << CP0C3_MSAP)) {
    compute_hflags(env);
    restore_fp_status(env);
    restore_pamask(env);
    cs->exception_index = EXCP_NONE;
    if (semihosting_get_argc()) {
        /* UHI interface can be used to obtain argc and argv */
        env->active_tc.gpr[4] = -1;
void restore_state_to_opc(CPUMIPSState *env, TranslationBlock *tb,
                          target_ulong *data)
    env->active_tc.PC = data[0];
    env->hflags &= ~MIPS_HFLAG_BMASK;
    env->hflags |= data[1];
    switch (env->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
        env->btarget = data[2];
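/*
 * The data[] values consumed here mirror the three arguments recorded
 * per instruction by tcg_gen_insn_start() in gen_intermediate_code():
 * data[0] is the PC, data[1] the branch-state bits of hflags
 * (MIPS_HFLAG_BMASK), and data[2] the branch target, which is only
 * meaningful for the branch states handled by the switch above.
 */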