/*
 * MIPS emulation for QEMU - main translation routines
 *
 * Copyright (c) 2004-2005 Jocelyn Mayer
 * Copyright (c) 2006 Marius Groeger (FPU operations)
 * Copyright (c) 2006 Thiemo Seufer (MIPS32R2 support)
 * Copyright (c) 2009 CodeSourcery (MIPS16 and microMIPS support)
 * Copyright (c) 2012 Jia Liu & Dongxue Zhang (MIPS ASE DSP support)
 * Copyright (c) 2020 Philippe Mathieu-Daudé
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */
25 #include "qemu/osdep.h"
28 #include "tcg/tcg-op.h"
29 #include "exec/translator.h"
30 #include "exec/helper-proto.h"
31 #include "exec/helper-gen.h"
32 #include "semihosting/semihost.h"
34 #include "target/mips/trace.h"
35 #include "trace-tcg.h"
36 #include "exec/translator.h"
38 #include "qemu/qemu-print.h"
39 #include "fpu_helper.h"
40 #include "translate.h"
/*
 * MIPS major opcodes: the 6-bit primary opcode field, bits [31:26] of the
 * instruction word. Several mnemonics share one encoding (e.g. the R6
 * compact branches reuse legacy branch opcodes and are disambiguated by
 * the register fields at decode time).
 */
enum {
    /* indirect opcode tables */
    OPC_SPECIAL  = (0x00 << 26),
    OPC_REGIMM   = (0x01 << 26),
    OPC_CP0      = (0x10 << 26),
    OPC_CP2      = (0x12 << 26),
    OPC_CP3      = (0x13 << 26),
    OPC_SPECIAL2 = (0x1C << 26),
    OPC_SPECIAL3 = (0x1F << 26),
    /* arithmetic with immediate */
    OPC_ADDI     = (0x08 << 26),
    OPC_ADDIU    = (0x09 << 26),
    OPC_SLTI     = (0x0A << 26),
    OPC_SLTIU    = (0x0B << 26),
    /* logic with immediate */
    OPC_ANDI     = (0x0C << 26),
    OPC_ORI      = (0x0D << 26),
    OPC_XORI     = (0x0E << 26),
    OPC_LUI      = (0x0F << 26),
    /* arithmetic with immediate */
    OPC_DADDI    = (0x18 << 26),
    OPC_DADDIU   = (0x19 << 26),
    /* Jump and branches */
    OPC_JAL      = (0x03 << 26),
    OPC_BEQ      = (0x04 << 26), /* Unconditional if rs = rt = 0 (B) */
    OPC_BEQL     = (0x14 << 26),
    OPC_BNE      = (0x05 << 26),
    OPC_BNEL     = (0x15 << 26),
    OPC_BLEZ     = (0x06 << 26),
    OPC_BLEZL    = (0x16 << 26),
    OPC_BGTZ     = (0x07 << 26),
    OPC_BGTZL    = (0x17 << 26),
    OPC_JALX     = (0x1D << 26),
    OPC_DAUI     = (0x1D << 26),
    /* Load and stores */
    OPC_LDL      = (0x1A << 26),
    OPC_LDR      = (0x1B << 26),
    OPC_LB       = (0x20 << 26),
    OPC_LH       = (0x21 << 26),
    OPC_LWL      = (0x22 << 26),
    OPC_LW       = (0x23 << 26),
    OPC_LWPC     = OPC_LW | 0x5,
    OPC_LBU      = (0x24 << 26),
    OPC_LHU      = (0x25 << 26),
    OPC_LWR      = (0x26 << 26),
    OPC_LWU      = (0x27 << 26),
    OPC_SB       = (0x28 << 26),
    OPC_SH       = (0x29 << 26),
    OPC_SWL      = (0x2A << 26),
    OPC_SW       = (0x2B << 26),
    OPC_SDL      = (0x2C << 26),
    OPC_SDR      = (0x2D << 26),
    OPC_SWR      = (0x2E << 26),
    OPC_LL       = (0x30 << 26),
    OPC_LLD      = (0x34 << 26),
    OPC_LD       = (0x37 << 26),
    OPC_LDPC     = OPC_LD | 0x5,
    OPC_SC       = (0x38 << 26),
    OPC_SCD      = (0x3C << 26),
    OPC_SD       = (0x3F << 26),
    /* Floating point load/store */
    OPC_LWC1     = (0x31 << 26),
    OPC_LWC2     = (0x32 << 26),
    OPC_LDC1     = (0x35 << 26),
    OPC_LDC2     = (0x36 << 26),
    OPC_SWC1     = (0x39 << 26),
    OPC_SWC2     = (0x3A << 26),
    OPC_SDC1     = (0x3D << 26),
    OPC_SDC2     = (0x3E << 26),
    /* Compact Branches */
    OPC_BLEZALC  = (0x06 << 26),
    OPC_BGEZALC  = (0x06 << 26),
    OPC_BGEUC    = (0x06 << 26),
    OPC_BGTZALC  = (0x07 << 26),
    OPC_BLTZALC  = (0x07 << 26),
    OPC_BLTUC    = (0x07 << 26),
    OPC_BOVC     = (0x08 << 26),
    OPC_BEQZALC  = (0x08 << 26),
    OPC_BEQC     = (0x08 << 26),
    OPC_BLEZC    = (0x16 << 26),
    OPC_BGEZC    = (0x16 << 26),
    OPC_BGEC     = (0x16 << 26),
    OPC_BGTZC    = (0x17 << 26),
    OPC_BLTZC    = (0x17 << 26),
    OPC_BLTC     = (0x17 << 26),
    OPC_BNVC     = (0x18 << 26),
    OPC_BNEZALC  = (0x18 << 26),
    OPC_BNEC     = (0x18 << 26),
    OPC_BC       = (0x32 << 26),
    OPC_BEQZC    = (0x36 << 26),
    OPC_JIC      = (0x36 << 26),
    OPC_BALC     = (0x3A << 26),
    OPC_BNEZC    = (0x3E << 26),
    OPC_JIALC    = (0x3E << 26),
    /* MDMX ASE specific */
    OPC_MDMX     = (0x1E << 26),
    /* Cache and prefetch */
    OPC_CACHE    = (0x2F << 26),
    OPC_PREF     = (0x33 << 26),
    /* PC-relative address computation / loads */
    OPC_PCREL    = (0x3B << 26),
};
146 /* PC-relative address computation / loads */
147 #define MASK_OPC_PCREL_TOP2BITS(op) (MASK_OP_MAJOR(op) | (op & (3 << 19)))
148 #define MASK_OPC_PCREL_TOP5BITS(op) (MASK_OP_MAJOR(op) | (op & (0x1f << 16)))
150 /* Instructions determined by bits 19 and 20 */
151 OPC_ADDIUPC
= OPC_PCREL
| (0 << 19),
152 R6_OPC_LWPC
= OPC_PCREL
| (1 << 19),
153 OPC_LWUPC
= OPC_PCREL
| (2 << 19),
155 /* Instructions determined by bits 16 ... 20 */
156 OPC_AUIPC
= OPC_PCREL
| (0x1e << 16),
157 OPC_ALUIPC
= OPC_PCREL
| (0x1f << 16),
160 R6_OPC_LDPC
= OPC_PCREL
| (6 << 18),
163 /* MIPS special opcodes */
164 #define MASK_SPECIAL(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
168 OPC_SLL
= 0x00 | OPC_SPECIAL
,
169 /* NOP is SLL r0, r0, 0 */
170 /* SSNOP is SLL r0, r0, 1 */
171 /* EHB is SLL r0, r0, 3 */
172 OPC_SRL
= 0x02 | OPC_SPECIAL
, /* also ROTR */
173 OPC_ROTR
= OPC_SRL
| (1 << 21),
174 OPC_SRA
= 0x03 | OPC_SPECIAL
,
175 OPC_SLLV
= 0x04 | OPC_SPECIAL
,
176 OPC_SRLV
= 0x06 | OPC_SPECIAL
, /* also ROTRV */
177 OPC_ROTRV
= OPC_SRLV
| (1 << 6),
178 OPC_SRAV
= 0x07 | OPC_SPECIAL
,
179 OPC_DSLLV
= 0x14 | OPC_SPECIAL
,
180 OPC_DSRLV
= 0x16 | OPC_SPECIAL
, /* also DROTRV */
181 OPC_DROTRV
= OPC_DSRLV
| (1 << 6),
182 OPC_DSRAV
= 0x17 | OPC_SPECIAL
,
183 OPC_DSLL
= 0x38 | OPC_SPECIAL
,
184 OPC_DSRL
= 0x3A | OPC_SPECIAL
, /* also DROTR */
185 OPC_DROTR
= OPC_DSRL
| (1 << 21),
186 OPC_DSRA
= 0x3B | OPC_SPECIAL
,
187 OPC_DSLL32
= 0x3C | OPC_SPECIAL
,
188 OPC_DSRL32
= 0x3E | OPC_SPECIAL
, /* also DROTR32 */
189 OPC_DROTR32
= OPC_DSRL32
| (1 << 21),
190 OPC_DSRA32
= 0x3F | OPC_SPECIAL
,
191 /* Multiplication / division */
192 OPC_MULT
= 0x18 | OPC_SPECIAL
,
193 OPC_MULTU
= 0x19 | OPC_SPECIAL
,
194 OPC_DIV
= 0x1A | OPC_SPECIAL
,
195 OPC_DIVU
= 0x1B | OPC_SPECIAL
,
196 OPC_DMULT
= 0x1C | OPC_SPECIAL
,
197 OPC_DMULTU
= 0x1D | OPC_SPECIAL
,
198 OPC_DDIV
= 0x1E | OPC_SPECIAL
,
199 OPC_DDIVU
= 0x1F | OPC_SPECIAL
,
201 /* 2 registers arithmetic / logic */
202 OPC_ADD
= 0x20 | OPC_SPECIAL
,
203 OPC_ADDU
= 0x21 | OPC_SPECIAL
,
204 OPC_SUB
= 0x22 | OPC_SPECIAL
,
205 OPC_SUBU
= 0x23 | OPC_SPECIAL
,
206 OPC_AND
= 0x24 | OPC_SPECIAL
,
207 OPC_OR
= 0x25 | OPC_SPECIAL
,
208 OPC_XOR
= 0x26 | OPC_SPECIAL
,
209 OPC_NOR
= 0x27 | OPC_SPECIAL
,
210 OPC_SLT
= 0x2A | OPC_SPECIAL
,
211 OPC_SLTU
= 0x2B | OPC_SPECIAL
,
212 OPC_DADD
= 0x2C | OPC_SPECIAL
,
213 OPC_DADDU
= 0x2D | OPC_SPECIAL
,
214 OPC_DSUB
= 0x2E | OPC_SPECIAL
,
215 OPC_DSUBU
= 0x2F | OPC_SPECIAL
,
217 OPC_JR
= 0x08 | OPC_SPECIAL
, /* Also JR.HB */
218 OPC_JALR
= 0x09 | OPC_SPECIAL
, /* Also JALR.HB */
220 OPC_TGE
= 0x30 | OPC_SPECIAL
,
221 OPC_TGEU
= 0x31 | OPC_SPECIAL
,
222 OPC_TLT
= 0x32 | OPC_SPECIAL
,
223 OPC_TLTU
= 0x33 | OPC_SPECIAL
,
224 OPC_TEQ
= 0x34 | OPC_SPECIAL
,
225 OPC_TNE
= 0x36 | OPC_SPECIAL
,
226 /* HI / LO registers load & stores */
227 OPC_MFHI
= 0x10 | OPC_SPECIAL
,
228 OPC_MTHI
= 0x11 | OPC_SPECIAL
,
229 OPC_MFLO
= 0x12 | OPC_SPECIAL
,
230 OPC_MTLO
= 0x13 | OPC_SPECIAL
,
231 /* Conditional moves */
232 OPC_MOVZ
= 0x0A | OPC_SPECIAL
,
233 OPC_MOVN
= 0x0B | OPC_SPECIAL
,
235 OPC_SELEQZ
= 0x35 | OPC_SPECIAL
,
236 OPC_SELNEZ
= 0x37 | OPC_SPECIAL
,
238 OPC_MOVCI
= 0x01 | OPC_SPECIAL
,
241 OPC_PMON
= 0x05 | OPC_SPECIAL
, /* unofficial */
242 OPC_SYSCALL
= 0x0C | OPC_SPECIAL
,
243 OPC_BREAK
= 0x0D | OPC_SPECIAL
,
244 OPC_SPIM
= 0x0E | OPC_SPECIAL
, /* unofficial */
245 OPC_SYNC
= 0x0F | OPC_SPECIAL
,
247 OPC_SPECIAL28_RESERVED
= 0x28 | OPC_SPECIAL
,
248 OPC_SPECIAL29_RESERVED
= 0x29 | OPC_SPECIAL
,
249 OPC_SPECIAL39_RESERVED
= 0x39 | OPC_SPECIAL
,
250 OPC_SPECIAL3D_RESERVED
= 0x3D | OPC_SPECIAL
,
254 * R6 Multiply and Divide instructions have the same opcode
255 * and function field as legacy OPC_MULT[U]/OPC_DIV[U]
257 #define MASK_R6_MULDIV(op) (MASK_SPECIAL(op) | (op & (0x7ff)))
260 R6_OPC_MUL
= OPC_MULT
| (2 << 6),
261 R6_OPC_MUH
= OPC_MULT
| (3 << 6),
262 R6_OPC_MULU
= OPC_MULTU
| (2 << 6),
263 R6_OPC_MUHU
= OPC_MULTU
| (3 << 6),
264 R6_OPC_DIV
= OPC_DIV
| (2 << 6),
265 R6_OPC_MOD
= OPC_DIV
| (3 << 6),
266 R6_OPC_DIVU
= OPC_DIVU
| (2 << 6),
267 R6_OPC_MODU
= OPC_DIVU
| (3 << 6),
269 R6_OPC_DMUL
= OPC_DMULT
| (2 << 6),
270 R6_OPC_DMUH
= OPC_DMULT
| (3 << 6),
271 R6_OPC_DMULU
= OPC_DMULTU
| (2 << 6),
272 R6_OPC_DMUHU
= OPC_DMULTU
| (3 << 6),
273 R6_OPC_DDIV
= OPC_DDIV
| (2 << 6),
274 R6_OPC_DMOD
= OPC_DDIV
| (3 << 6),
275 R6_OPC_DDIVU
= OPC_DDIVU
| (2 << 6),
276 R6_OPC_DMODU
= OPC_DDIVU
| (3 << 6),
278 R6_OPC_CLZ
= 0x10 | OPC_SPECIAL
,
279 R6_OPC_CLO
= 0x11 | OPC_SPECIAL
,
280 R6_OPC_DCLZ
= 0x12 | OPC_SPECIAL
,
281 R6_OPC_DCLO
= 0x13 | OPC_SPECIAL
,
282 R6_OPC_SDBBP
= 0x0e | OPC_SPECIAL
,
285 /* Multiplication variants of the vr54xx. */
286 #define MASK_MUL_VR54XX(op) (MASK_SPECIAL(op) | (op & (0x1F << 6)))
289 OPC_VR54XX_MULS
= (0x03 << 6) | OPC_MULT
,
290 OPC_VR54XX_MULSU
= (0x03 << 6) | OPC_MULTU
,
291 OPC_VR54XX_MACC
= (0x05 << 6) | OPC_MULT
,
292 OPC_VR54XX_MACCU
= (0x05 << 6) | OPC_MULTU
,
293 OPC_VR54XX_MSAC
= (0x07 << 6) | OPC_MULT
,
294 OPC_VR54XX_MSACU
= (0x07 << 6) | OPC_MULTU
,
295 OPC_VR54XX_MULHI
= (0x09 << 6) | OPC_MULT
,
296 OPC_VR54XX_MULHIU
= (0x09 << 6) | OPC_MULTU
,
297 OPC_VR54XX_MULSHI
= (0x0B << 6) | OPC_MULT
,
298 OPC_VR54XX_MULSHIU
= (0x0B << 6) | OPC_MULTU
,
299 OPC_VR54XX_MACCHI
= (0x0D << 6) | OPC_MULT
,
300 OPC_VR54XX_MACCHIU
= (0x0D << 6) | OPC_MULTU
,
301 OPC_VR54XX_MSACHI
= (0x0F << 6) | OPC_MULT
,
302 OPC_VR54XX_MSACHIU
= (0x0F << 6) | OPC_MULTU
,
305 /* REGIMM (rt field) opcodes */
306 #define MASK_REGIMM(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 16)))
309 OPC_BLTZ
= (0x00 << 16) | OPC_REGIMM
,
310 OPC_BLTZL
= (0x02 << 16) | OPC_REGIMM
,
311 OPC_BGEZ
= (0x01 << 16) | OPC_REGIMM
,
312 OPC_BGEZL
= (0x03 << 16) | OPC_REGIMM
,
313 OPC_BLTZAL
= (0x10 << 16) | OPC_REGIMM
,
314 OPC_BLTZALL
= (0x12 << 16) | OPC_REGIMM
,
315 OPC_BGEZAL
= (0x11 << 16) | OPC_REGIMM
,
316 OPC_BGEZALL
= (0x13 << 16) | OPC_REGIMM
,
317 OPC_TGEI
= (0x08 << 16) | OPC_REGIMM
,
318 OPC_TGEIU
= (0x09 << 16) | OPC_REGIMM
,
319 OPC_TLTI
= (0x0A << 16) | OPC_REGIMM
,
320 OPC_TLTIU
= (0x0B << 16) | OPC_REGIMM
,
321 OPC_TEQI
= (0x0C << 16) | OPC_REGIMM
,
322 OPC_TNEI
= (0x0E << 16) | OPC_REGIMM
,
323 OPC_SIGRIE
= (0x17 << 16) | OPC_REGIMM
,
324 OPC_SYNCI
= (0x1F << 16) | OPC_REGIMM
,
326 OPC_DAHI
= (0x06 << 16) | OPC_REGIMM
,
327 OPC_DATI
= (0x1e << 16) | OPC_REGIMM
,
330 /* Special2 opcodes */
331 #define MASK_SPECIAL2(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
334 /* Multiply & xxx operations */
335 OPC_MADD
= 0x00 | OPC_SPECIAL2
,
336 OPC_MADDU
= 0x01 | OPC_SPECIAL2
,
337 OPC_MUL
= 0x02 | OPC_SPECIAL2
,
338 OPC_MSUB
= 0x04 | OPC_SPECIAL2
,
339 OPC_MSUBU
= 0x05 | OPC_SPECIAL2
,
341 OPC_MULT_G_2F
= 0x10 | OPC_SPECIAL2
,
342 OPC_DMULT_G_2F
= 0x11 | OPC_SPECIAL2
,
343 OPC_MULTU_G_2F
= 0x12 | OPC_SPECIAL2
,
344 OPC_DMULTU_G_2F
= 0x13 | OPC_SPECIAL2
,
345 OPC_DIV_G_2F
= 0x14 | OPC_SPECIAL2
,
346 OPC_DDIV_G_2F
= 0x15 | OPC_SPECIAL2
,
347 OPC_DIVU_G_2F
= 0x16 | OPC_SPECIAL2
,
348 OPC_DDIVU_G_2F
= 0x17 | OPC_SPECIAL2
,
349 OPC_MOD_G_2F
= 0x1c | OPC_SPECIAL2
,
350 OPC_DMOD_G_2F
= 0x1d | OPC_SPECIAL2
,
351 OPC_MODU_G_2F
= 0x1e | OPC_SPECIAL2
,
352 OPC_DMODU_G_2F
= 0x1f | OPC_SPECIAL2
,
354 OPC_CLZ
= 0x20 | OPC_SPECIAL2
,
355 OPC_CLO
= 0x21 | OPC_SPECIAL2
,
356 OPC_DCLZ
= 0x24 | OPC_SPECIAL2
,
357 OPC_DCLO
= 0x25 | OPC_SPECIAL2
,
359 OPC_SDBBP
= 0x3F | OPC_SPECIAL2
,
362 /* Special3 opcodes */
363 #define MASK_SPECIAL3(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
366 OPC_EXT
= 0x00 | OPC_SPECIAL3
,
367 OPC_DEXTM
= 0x01 | OPC_SPECIAL3
,
368 OPC_DEXTU
= 0x02 | OPC_SPECIAL3
,
369 OPC_DEXT
= 0x03 | OPC_SPECIAL3
,
370 OPC_INS
= 0x04 | OPC_SPECIAL3
,
371 OPC_DINSM
= 0x05 | OPC_SPECIAL3
,
372 OPC_DINSU
= 0x06 | OPC_SPECIAL3
,
373 OPC_DINS
= 0x07 | OPC_SPECIAL3
,
374 OPC_FORK
= 0x08 | OPC_SPECIAL3
,
375 OPC_YIELD
= 0x09 | OPC_SPECIAL3
,
376 OPC_BSHFL
= 0x20 | OPC_SPECIAL3
,
377 OPC_DBSHFL
= 0x24 | OPC_SPECIAL3
,
378 OPC_RDHWR
= 0x3B | OPC_SPECIAL3
,
379 OPC_GINV
= 0x3D | OPC_SPECIAL3
,
382 OPC_MULT_G_2E
= 0x18 | OPC_SPECIAL3
,
383 OPC_MULTU_G_2E
= 0x19 | OPC_SPECIAL3
,
384 OPC_DIV_G_2E
= 0x1A | OPC_SPECIAL3
,
385 OPC_DIVU_G_2E
= 0x1B | OPC_SPECIAL3
,
386 OPC_DMULT_G_2E
= 0x1C | OPC_SPECIAL3
,
387 OPC_DMULTU_G_2E
= 0x1D | OPC_SPECIAL3
,
388 OPC_DDIV_G_2E
= 0x1E | OPC_SPECIAL3
,
389 OPC_DDIVU_G_2E
= 0x1F | OPC_SPECIAL3
,
390 OPC_MOD_G_2E
= 0x22 | OPC_SPECIAL3
,
391 OPC_MODU_G_2E
= 0x23 | OPC_SPECIAL3
,
392 OPC_DMOD_G_2E
= 0x26 | OPC_SPECIAL3
,
393 OPC_DMODU_G_2E
= 0x27 | OPC_SPECIAL3
,
396 OPC_LX_DSP
= 0x0A | OPC_SPECIAL3
,
397 /* MIPS DSP Arithmetic */
398 OPC_ADDU_QB_DSP
= 0x10 | OPC_SPECIAL3
,
399 OPC_ADDU_OB_DSP
= 0x14 | OPC_SPECIAL3
,
400 OPC_ABSQ_S_PH_DSP
= 0x12 | OPC_SPECIAL3
,
401 OPC_ABSQ_S_QH_DSP
= 0x16 | OPC_SPECIAL3
,
402 /* OPC_ADDUH_QB_DSP is same as OPC_MULT_G_2E. */
403 /* OPC_ADDUH_QB_DSP = 0x18 | OPC_SPECIAL3, */
404 OPC_CMPU_EQ_QB_DSP
= 0x11 | OPC_SPECIAL3
,
405 OPC_CMPU_EQ_OB_DSP
= 0x15 | OPC_SPECIAL3
,
406 /* MIPS DSP GPR-Based Shift Sub-class */
407 OPC_SHLL_QB_DSP
= 0x13 | OPC_SPECIAL3
,
408 OPC_SHLL_OB_DSP
= 0x17 | OPC_SPECIAL3
,
409 /* MIPS DSP Multiply Sub-class insns */
410 /* OPC_MUL_PH_DSP is same as OPC_ADDUH_QB_DSP. */
411 /* OPC_MUL_PH_DSP = 0x18 | OPC_SPECIAL3, */
412 OPC_DPA_W_PH_DSP
= 0x30 | OPC_SPECIAL3
,
413 OPC_DPAQ_W_QH_DSP
= 0x34 | OPC_SPECIAL3
,
414 /* DSP Bit/Manipulation Sub-class */
415 OPC_INSV_DSP
= 0x0C | OPC_SPECIAL3
,
416 OPC_DINSV_DSP
= 0x0D | OPC_SPECIAL3
,
417 /* MIPS DSP Append Sub-class */
418 OPC_APPEND_DSP
= 0x31 | OPC_SPECIAL3
,
419 OPC_DAPPEND_DSP
= 0x35 | OPC_SPECIAL3
,
420 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
421 OPC_EXTR_W_DSP
= 0x38 | OPC_SPECIAL3
,
422 OPC_DEXTR_W_DSP
= 0x3C | OPC_SPECIAL3
,
425 OPC_LWLE
= 0x19 | OPC_SPECIAL3
,
426 OPC_LWRE
= 0x1A | OPC_SPECIAL3
,
427 OPC_CACHEE
= 0x1B | OPC_SPECIAL3
,
428 OPC_SBE
= 0x1C | OPC_SPECIAL3
,
429 OPC_SHE
= 0x1D | OPC_SPECIAL3
,
430 OPC_SCE
= 0x1E | OPC_SPECIAL3
,
431 OPC_SWE
= 0x1F | OPC_SPECIAL3
,
432 OPC_SWLE
= 0x21 | OPC_SPECIAL3
,
433 OPC_SWRE
= 0x22 | OPC_SPECIAL3
,
434 OPC_PREFE
= 0x23 | OPC_SPECIAL3
,
435 OPC_LBUE
= 0x28 | OPC_SPECIAL3
,
436 OPC_LHUE
= 0x29 | OPC_SPECIAL3
,
437 OPC_LBE
= 0x2C | OPC_SPECIAL3
,
438 OPC_LHE
= 0x2D | OPC_SPECIAL3
,
439 OPC_LLE
= 0x2E | OPC_SPECIAL3
,
440 OPC_LWE
= 0x2F | OPC_SPECIAL3
,
443 R6_OPC_PREF
= 0x35 | OPC_SPECIAL3
,
444 R6_OPC_CACHE
= 0x25 | OPC_SPECIAL3
,
445 R6_OPC_LL
= 0x36 | OPC_SPECIAL3
,
446 R6_OPC_SC
= 0x26 | OPC_SPECIAL3
,
447 R6_OPC_LLD
= 0x37 | OPC_SPECIAL3
,
448 R6_OPC_SCD
= 0x27 | OPC_SPECIAL3
,
451 /* Loongson EXT load/store quad word opcodes */
452 #define MASK_LOONGSON_GSLSQ(op) (MASK_OP_MAJOR(op) | (op & 0x8020))
454 OPC_GSLQ
= 0x0020 | OPC_LWC2
,
455 OPC_GSLQC1
= 0x8020 | OPC_LWC2
,
456 OPC_GSSHFL
= OPC_LWC2
,
457 OPC_GSSQ
= 0x0020 | OPC_SWC2
,
458 OPC_GSSQC1
= 0x8020 | OPC_SWC2
,
459 OPC_GSSHFS
= OPC_SWC2
,
462 /* Loongson EXT shifted load/store opcodes */
463 #define MASK_LOONGSON_GSSHFLS(op) (MASK_OP_MAJOR(op) | (op & 0xc03f))
465 OPC_GSLWLC1
= 0x4 | OPC_GSSHFL
,
466 OPC_GSLWRC1
= 0x5 | OPC_GSSHFL
,
467 OPC_GSLDLC1
= 0x6 | OPC_GSSHFL
,
468 OPC_GSLDRC1
= 0x7 | OPC_GSSHFL
,
469 OPC_GSSWLC1
= 0x4 | OPC_GSSHFS
,
470 OPC_GSSWRC1
= 0x5 | OPC_GSSHFS
,
471 OPC_GSSDLC1
= 0x6 | OPC_GSSHFS
,
472 OPC_GSSDRC1
= 0x7 | OPC_GSSHFS
,
475 /* Loongson EXT LDC2/SDC2 opcodes */
476 #define MASK_LOONGSON_LSDC2(op) (MASK_OP_MAJOR(op) | (op & 0x7))
479 OPC_GSLBX
= 0x0 | OPC_LDC2
,
480 OPC_GSLHX
= 0x1 | OPC_LDC2
,
481 OPC_GSLWX
= 0x2 | OPC_LDC2
,
482 OPC_GSLDX
= 0x3 | OPC_LDC2
,
483 OPC_GSLWXC1
= 0x6 | OPC_LDC2
,
484 OPC_GSLDXC1
= 0x7 | OPC_LDC2
,
485 OPC_GSSBX
= 0x0 | OPC_SDC2
,
486 OPC_GSSHX
= 0x1 | OPC_SDC2
,
487 OPC_GSSWX
= 0x2 | OPC_SDC2
,
488 OPC_GSSDX
= 0x3 | OPC_SDC2
,
489 OPC_GSSWXC1
= 0x6 | OPC_SDC2
,
490 OPC_GSSDXC1
= 0x7 | OPC_SDC2
,
494 #define MASK_BSHFL(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
497 OPC_WSBH
= (0x02 << 6) | OPC_BSHFL
,
498 OPC_SEB
= (0x10 << 6) | OPC_BSHFL
,
499 OPC_SEH
= (0x18 << 6) | OPC_BSHFL
,
500 OPC_ALIGN
= (0x08 << 6) | OPC_BSHFL
, /* 010.bp (010.00 to 010.11) */
501 OPC_ALIGN_1
= (0x09 << 6) | OPC_BSHFL
,
502 OPC_ALIGN_2
= (0x0A << 6) | OPC_BSHFL
,
503 OPC_ALIGN_3
= (0x0B << 6) | OPC_BSHFL
,
504 OPC_BITSWAP
= (0x00 << 6) | OPC_BSHFL
/* 00000 */
508 #define MASK_DBSHFL(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
511 OPC_DSBH
= (0x02 << 6) | OPC_DBSHFL
,
512 OPC_DSHD
= (0x05 << 6) | OPC_DBSHFL
,
513 OPC_DALIGN
= (0x08 << 6) | OPC_DBSHFL
, /* 01.bp (01.000 to 01.111) */
514 OPC_DALIGN_1
= (0x09 << 6) | OPC_DBSHFL
,
515 OPC_DALIGN_2
= (0x0A << 6) | OPC_DBSHFL
,
516 OPC_DALIGN_3
= (0x0B << 6) | OPC_DBSHFL
,
517 OPC_DALIGN_4
= (0x0C << 6) | OPC_DBSHFL
,
518 OPC_DALIGN_5
= (0x0D << 6) | OPC_DBSHFL
,
519 OPC_DALIGN_6
= (0x0E << 6) | OPC_DBSHFL
,
520 OPC_DALIGN_7
= (0x0F << 6) | OPC_DBSHFL
,
521 OPC_DBITSWAP
= (0x00 << 6) | OPC_DBSHFL
, /* 00000 */
524 /* MIPS DSP REGIMM opcodes */
526 OPC_BPOSGE32
= (0x1C << 16) | OPC_REGIMM
,
527 OPC_BPOSGE64
= (0x1D << 16) | OPC_REGIMM
,
530 #define MASK_LX(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
533 OPC_LBUX
= (0x06 << 6) | OPC_LX_DSP
,
534 OPC_LHX
= (0x04 << 6) | OPC_LX_DSP
,
535 OPC_LWX
= (0x00 << 6) | OPC_LX_DSP
,
536 OPC_LDX
= (0x08 << 6) | OPC_LX_DSP
,
539 #define MASK_ADDU_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
541 /* MIPS DSP Arithmetic Sub-class */
542 OPC_ADDQ_PH
= (0x0A << 6) | OPC_ADDU_QB_DSP
,
543 OPC_ADDQ_S_PH
= (0x0E << 6) | OPC_ADDU_QB_DSP
,
544 OPC_ADDQ_S_W
= (0x16 << 6) | OPC_ADDU_QB_DSP
,
545 OPC_ADDU_QB
= (0x00 << 6) | OPC_ADDU_QB_DSP
,
546 OPC_ADDU_S_QB
= (0x04 << 6) | OPC_ADDU_QB_DSP
,
547 OPC_ADDU_PH
= (0x08 << 6) | OPC_ADDU_QB_DSP
,
548 OPC_ADDU_S_PH
= (0x0C << 6) | OPC_ADDU_QB_DSP
,
549 OPC_SUBQ_PH
= (0x0B << 6) | OPC_ADDU_QB_DSP
,
550 OPC_SUBQ_S_PH
= (0x0F << 6) | OPC_ADDU_QB_DSP
,
551 OPC_SUBQ_S_W
= (0x17 << 6) | OPC_ADDU_QB_DSP
,
552 OPC_SUBU_QB
= (0x01 << 6) | OPC_ADDU_QB_DSP
,
553 OPC_SUBU_S_QB
= (0x05 << 6) | OPC_ADDU_QB_DSP
,
554 OPC_SUBU_PH
= (0x09 << 6) | OPC_ADDU_QB_DSP
,
555 OPC_SUBU_S_PH
= (0x0D << 6) | OPC_ADDU_QB_DSP
,
556 OPC_ADDSC
= (0x10 << 6) | OPC_ADDU_QB_DSP
,
557 OPC_ADDWC
= (0x11 << 6) | OPC_ADDU_QB_DSP
,
558 OPC_MODSUB
= (0x12 << 6) | OPC_ADDU_QB_DSP
,
559 OPC_RADDU_W_QB
= (0x14 << 6) | OPC_ADDU_QB_DSP
,
560 /* MIPS DSP Multiply Sub-class insns */
561 OPC_MULEU_S_PH_QBL
= (0x06 << 6) | OPC_ADDU_QB_DSP
,
562 OPC_MULEU_S_PH_QBR
= (0x07 << 6) | OPC_ADDU_QB_DSP
,
563 OPC_MULQ_RS_PH
= (0x1F << 6) | OPC_ADDU_QB_DSP
,
564 OPC_MULEQ_S_W_PHL
= (0x1C << 6) | OPC_ADDU_QB_DSP
,
565 OPC_MULEQ_S_W_PHR
= (0x1D << 6) | OPC_ADDU_QB_DSP
,
566 OPC_MULQ_S_PH
= (0x1E << 6) | OPC_ADDU_QB_DSP
,
569 #define OPC_ADDUH_QB_DSP OPC_MULT_G_2E
570 #define MASK_ADDUH_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
572 /* MIPS DSP Arithmetic Sub-class */
573 OPC_ADDUH_QB
= (0x00 << 6) | OPC_ADDUH_QB_DSP
,
574 OPC_ADDUH_R_QB
= (0x02 << 6) | OPC_ADDUH_QB_DSP
,
575 OPC_ADDQH_PH
= (0x08 << 6) | OPC_ADDUH_QB_DSP
,
576 OPC_ADDQH_R_PH
= (0x0A << 6) | OPC_ADDUH_QB_DSP
,
577 OPC_ADDQH_W
= (0x10 << 6) | OPC_ADDUH_QB_DSP
,
578 OPC_ADDQH_R_W
= (0x12 << 6) | OPC_ADDUH_QB_DSP
,
579 OPC_SUBUH_QB
= (0x01 << 6) | OPC_ADDUH_QB_DSP
,
580 OPC_SUBUH_R_QB
= (0x03 << 6) | OPC_ADDUH_QB_DSP
,
581 OPC_SUBQH_PH
= (0x09 << 6) | OPC_ADDUH_QB_DSP
,
582 OPC_SUBQH_R_PH
= (0x0B << 6) | OPC_ADDUH_QB_DSP
,
583 OPC_SUBQH_W
= (0x11 << 6) | OPC_ADDUH_QB_DSP
,
584 OPC_SUBQH_R_W
= (0x13 << 6) | OPC_ADDUH_QB_DSP
,
585 /* MIPS DSP Multiply Sub-class insns */
586 OPC_MUL_PH
= (0x0C << 6) | OPC_ADDUH_QB_DSP
,
587 OPC_MUL_S_PH
= (0x0E << 6) | OPC_ADDUH_QB_DSP
,
588 OPC_MULQ_S_W
= (0x16 << 6) | OPC_ADDUH_QB_DSP
,
589 OPC_MULQ_RS_W
= (0x17 << 6) | OPC_ADDUH_QB_DSP
,
592 #define MASK_ABSQ_S_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
594 /* MIPS DSP Arithmetic Sub-class */
595 OPC_ABSQ_S_QB
= (0x01 << 6) | OPC_ABSQ_S_PH_DSP
,
596 OPC_ABSQ_S_PH
= (0x09 << 6) | OPC_ABSQ_S_PH_DSP
,
597 OPC_ABSQ_S_W
= (0x11 << 6) | OPC_ABSQ_S_PH_DSP
,
598 OPC_PRECEQ_W_PHL
= (0x0C << 6) | OPC_ABSQ_S_PH_DSP
,
599 OPC_PRECEQ_W_PHR
= (0x0D << 6) | OPC_ABSQ_S_PH_DSP
,
600 OPC_PRECEQU_PH_QBL
= (0x04 << 6) | OPC_ABSQ_S_PH_DSP
,
601 OPC_PRECEQU_PH_QBR
= (0x05 << 6) | OPC_ABSQ_S_PH_DSP
,
602 OPC_PRECEQU_PH_QBLA
= (0x06 << 6) | OPC_ABSQ_S_PH_DSP
,
603 OPC_PRECEQU_PH_QBRA
= (0x07 << 6) | OPC_ABSQ_S_PH_DSP
,
604 OPC_PRECEU_PH_QBL
= (0x1C << 6) | OPC_ABSQ_S_PH_DSP
,
605 OPC_PRECEU_PH_QBR
= (0x1D << 6) | OPC_ABSQ_S_PH_DSP
,
606 OPC_PRECEU_PH_QBLA
= (0x1E << 6) | OPC_ABSQ_S_PH_DSP
,
607 OPC_PRECEU_PH_QBRA
= (0x1F << 6) | OPC_ABSQ_S_PH_DSP
,
608 /* DSP Bit/Manipulation Sub-class */
609 OPC_BITREV
= (0x1B << 6) | OPC_ABSQ_S_PH_DSP
,
610 OPC_REPL_QB
= (0x02 << 6) | OPC_ABSQ_S_PH_DSP
,
611 OPC_REPLV_QB
= (0x03 << 6) | OPC_ABSQ_S_PH_DSP
,
612 OPC_REPL_PH
= (0x0A << 6) | OPC_ABSQ_S_PH_DSP
,
613 OPC_REPLV_PH
= (0x0B << 6) | OPC_ABSQ_S_PH_DSP
,
616 #define MASK_CMPU_EQ_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
618 /* MIPS DSP Arithmetic Sub-class */
619 OPC_PRECR_QB_PH
= (0x0D << 6) | OPC_CMPU_EQ_QB_DSP
,
620 OPC_PRECRQ_QB_PH
= (0x0C << 6) | OPC_CMPU_EQ_QB_DSP
,
621 OPC_PRECR_SRA_PH_W
= (0x1E << 6) | OPC_CMPU_EQ_QB_DSP
,
622 OPC_PRECR_SRA_R_PH_W
= (0x1F << 6) | OPC_CMPU_EQ_QB_DSP
,
623 OPC_PRECRQ_PH_W
= (0x14 << 6) | OPC_CMPU_EQ_QB_DSP
,
624 OPC_PRECRQ_RS_PH_W
= (0x15 << 6) | OPC_CMPU_EQ_QB_DSP
,
625 OPC_PRECRQU_S_QB_PH
= (0x0F << 6) | OPC_CMPU_EQ_QB_DSP
,
626 /* DSP Compare-Pick Sub-class */
627 OPC_CMPU_EQ_QB
= (0x00 << 6) | OPC_CMPU_EQ_QB_DSP
,
628 OPC_CMPU_LT_QB
= (0x01 << 6) | OPC_CMPU_EQ_QB_DSP
,
629 OPC_CMPU_LE_QB
= (0x02 << 6) | OPC_CMPU_EQ_QB_DSP
,
630 OPC_CMPGU_EQ_QB
= (0x04 << 6) | OPC_CMPU_EQ_QB_DSP
,
631 OPC_CMPGU_LT_QB
= (0x05 << 6) | OPC_CMPU_EQ_QB_DSP
,
632 OPC_CMPGU_LE_QB
= (0x06 << 6) | OPC_CMPU_EQ_QB_DSP
,
633 OPC_CMPGDU_EQ_QB
= (0x18 << 6) | OPC_CMPU_EQ_QB_DSP
,
634 OPC_CMPGDU_LT_QB
= (0x19 << 6) | OPC_CMPU_EQ_QB_DSP
,
635 OPC_CMPGDU_LE_QB
= (0x1A << 6) | OPC_CMPU_EQ_QB_DSP
,
636 OPC_CMP_EQ_PH
= (0x08 << 6) | OPC_CMPU_EQ_QB_DSP
,
637 OPC_CMP_LT_PH
= (0x09 << 6) | OPC_CMPU_EQ_QB_DSP
,
638 OPC_CMP_LE_PH
= (0x0A << 6) | OPC_CMPU_EQ_QB_DSP
,
639 OPC_PICK_QB
= (0x03 << 6) | OPC_CMPU_EQ_QB_DSP
,
640 OPC_PICK_PH
= (0x0B << 6) | OPC_CMPU_EQ_QB_DSP
,
641 OPC_PACKRL_PH
= (0x0E << 6) | OPC_CMPU_EQ_QB_DSP
,
644 #define MASK_SHLL_QB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
646 /* MIPS DSP GPR-Based Shift Sub-class */
647 OPC_SHLL_QB
= (0x00 << 6) | OPC_SHLL_QB_DSP
,
648 OPC_SHLLV_QB
= (0x02 << 6) | OPC_SHLL_QB_DSP
,
649 OPC_SHLL_PH
= (0x08 << 6) | OPC_SHLL_QB_DSP
,
650 OPC_SHLLV_PH
= (0x0A << 6) | OPC_SHLL_QB_DSP
,
651 OPC_SHLL_S_PH
= (0x0C << 6) | OPC_SHLL_QB_DSP
,
652 OPC_SHLLV_S_PH
= (0x0E << 6) | OPC_SHLL_QB_DSP
,
653 OPC_SHLL_S_W
= (0x14 << 6) | OPC_SHLL_QB_DSP
,
654 OPC_SHLLV_S_W
= (0x16 << 6) | OPC_SHLL_QB_DSP
,
655 OPC_SHRL_QB
= (0x01 << 6) | OPC_SHLL_QB_DSP
,
656 OPC_SHRLV_QB
= (0x03 << 6) | OPC_SHLL_QB_DSP
,
657 OPC_SHRL_PH
= (0x19 << 6) | OPC_SHLL_QB_DSP
,
658 OPC_SHRLV_PH
= (0x1B << 6) | OPC_SHLL_QB_DSP
,
659 OPC_SHRA_QB
= (0x04 << 6) | OPC_SHLL_QB_DSP
,
660 OPC_SHRA_R_QB
= (0x05 << 6) | OPC_SHLL_QB_DSP
,
661 OPC_SHRAV_QB
= (0x06 << 6) | OPC_SHLL_QB_DSP
,
662 OPC_SHRAV_R_QB
= (0x07 << 6) | OPC_SHLL_QB_DSP
,
663 OPC_SHRA_PH
= (0x09 << 6) | OPC_SHLL_QB_DSP
,
664 OPC_SHRAV_PH
= (0x0B << 6) | OPC_SHLL_QB_DSP
,
665 OPC_SHRA_R_PH
= (0x0D << 6) | OPC_SHLL_QB_DSP
,
666 OPC_SHRAV_R_PH
= (0x0F << 6) | OPC_SHLL_QB_DSP
,
667 OPC_SHRA_R_W
= (0x15 << 6) | OPC_SHLL_QB_DSP
,
668 OPC_SHRAV_R_W
= (0x17 << 6) | OPC_SHLL_QB_DSP
,
671 #define MASK_DPA_W_PH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
673 /* MIPS DSP Multiply Sub-class insns */
674 OPC_DPAU_H_QBL
= (0x03 << 6) | OPC_DPA_W_PH_DSP
,
675 OPC_DPAU_H_QBR
= (0x07 << 6) | OPC_DPA_W_PH_DSP
,
676 OPC_DPSU_H_QBL
= (0x0B << 6) | OPC_DPA_W_PH_DSP
,
677 OPC_DPSU_H_QBR
= (0x0F << 6) | OPC_DPA_W_PH_DSP
,
678 OPC_DPA_W_PH
= (0x00 << 6) | OPC_DPA_W_PH_DSP
,
679 OPC_DPAX_W_PH
= (0x08 << 6) | OPC_DPA_W_PH_DSP
,
680 OPC_DPAQ_S_W_PH
= (0x04 << 6) | OPC_DPA_W_PH_DSP
,
681 OPC_DPAQX_S_W_PH
= (0x18 << 6) | OPC_DPA_W_PH_DSP
,
682 OPC_DPAQX_SA_W_PH
= (0x1A << 6) | OPC_DPA_W_PH_DSP
,
683 OPC_DPS_W_PH
= (0x01 << 6) | OPC_DPA_W_PH_DSP
,
684 OPC_DPSX_W_PH
= (0x09 << 6) | OPC_DPA_W_PH_DSP
,
685 OPC_DPSQ_S_W_PH
= (0x05 << 6) | OPC_DPA_W_PH_DSP
,
686 OPC_DPSQX_S_W_PH
= (0x19 << 6) | OPC_DPA_W_PH_DSP
,
687 OPC_DPSQX_SA_W_PH
= (0x1B << 6) | OPC_DPA_W_PH_DSP
,
688 OPC_MULSAQ_S_W_PH
= (0x06 << 6) | OPC_DPA_W_PH_DSP
,
689 OPC_DPAQ_SA_L_W
= (0x0C << 6) | OPC_DPA_W_PH_DSP
,
690 OPC_DPSQ_SA_L_W
= (0x0D << 6) | OPC_DPA_W_PH_DSP
,
691 OPC_MAQ_S_W_PHL
= (0x14 << 6) | OPC_DPA_W_PH_DSP
,
692 OPC_MAQ_S_W_PHR
= (0x16 << 6) | OPC_DPA_W_PH_DSP
,
693 OPC_MAQ_SA_W_PHL
= (0x10 << 6) | OPC_DPA_W_PH_DSP
,
694 OPC_MAQ_SA_W_PHR
= (0x12 << 6) | OPC_DPA_W_PH_DSP
,
695 OPC_MULSA_W_PH
= (0x02 << 6) | OPC_DPA_W_PH_DSP
,
698 #define MASK_INSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
700 /* DSP Bit/Manipulation Sub-class */
701 OPC_INSV
= (0x00 << 6) | OPC_INSV_DSP
,
704 #define MASK_APPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
706 /* MIPS DSP Append Sub-class */
707 OPC_APPEND
= (0x00 << 6) | OPC_APPEND_DSP
,
708 OPC_PREPEND
= (0x01 << 6) | OPC_APPEND_DSP
,
709 OPC_BALIGN
= (0x10 << 6) | OPC_APPEND_DSP
,
712 #define MASK_EXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
714 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
715 OPC_EXTR_W
= (0x00 << 6) | OPC_EXTR_W_DSP
,
716 OPC_EXTR_R_W
= (0x04 << 6) | OPC_EXTR_W_DSP
,
717 OPC_EXTR_RS_W
= (0x06 << 6) | OPC_EXTR_W_DSP
,
718 OPC_EXTR_S_H
= (0x0E << 6) | OPC_EXTR_W_DSP
,
719 OPC_EXTRV_S_H
= (0x0F << 6) | OPC_EXTR_W_DSP
,
720 OPC_EXTRV_W
= (0x01 << 6) | OPC_EXTR_W_DSP
,
721 OPC_EXTRV_R_W
= (0x05 << 6) | OPC_EXTR_W_DSP
,
722 OPC_EXTRV_RS_W
= (0x07 << 6) | OPC_EXTR_W_DSP
,
723 OPC_EXTP
= (0x02 << 6) | OPC_EXTR_W_DSP
,
724 OPC_EXTPV
= (0x03 << 6) | OPC_EXTR_W_DSP
,
725 OPC_EXTPDP
= (0x0A << 6) | OPC_EXTR_W_DSP
,
726 OPC_EXTPDPV
= (0x0B << 6) | OPC_EXTR_W_DSP
,
727 OPC_SHILO
= (0x1A << 6) | OPC_EXTR_W_DSP
,
728 OPC_SHILOV
= (0x1B << 6) | OPC_EXTR_W_DSP
,
729 OPC_MTHLIP
= (0x1F << 6) | OPC_EXTR_W_DSP
,
730 OPC_WRDSP
= (0x13 << 6) | OPC_EXTR_W_DSP
,
731 OPC_RDDSP
= (0x12 << 6) | OPC_EXTR_W_DSP
,
734 #define MASK_ABSQ_S_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
736 /* MIPS DSP Arithmetic Sub-class */
737 OPC_PRECEQ_L_PWL
= (0x14 << 6) | OPC_ABSQ_S_QH_DSP
,
738 OPC_PRECEQ_L_PWR
= (0x15 << 6) | OPC_ABSQ_S_QH_DSP
,
739 OPC_PRECEQ_PW_QHL
= (0x0C << 6) | OPC_ABSQ_S_QH_DSP
,
740 OPC_PRECEQ_PW_QHR
= (0x0D << 6) | OPC_ABSQ_S_QH_DSP
,
741 OPC_PRECEQ_PW_QHLA
= (0x0E << 6) | OPC_ABSQ_S_QH_DSP
,
742 OPC_PRECEQ_PW_QHRA
= (0x0F << 6) | OPC_ABSQ_S_QH_DSP
,
743 OPC_PRECEQU_QH_OBL
= (0x04 << 6) | OPC_ABSQ_S_QH_DSP
,
744 OPC_PRECEQU_QH_OBR
= (0x05 << 6) | OPC_ABSQ_S_QH_DSP
,
745 OPC_PRECEQU_QH_OBLA
= (0x06 << 6) | OPC_ABSQ_S_QH_DSP
,
746 OPC_PRECEQU_QH_OBRA
= (0x07 << 6) | OPC_ABSQ_S_QH_DSP
,
747 OPC_PRECEU_QH_OBL
= (0x1C << 6) | OPC_ABSQ_S_QH_DSP
,
748 OPC_PRECEU_QH_OBR
= (0x1D << 6) | OPC_ABSQ_S_QH_DSP
,
749 OPC_PRECEU_QH_OBLA
= (0x1E << 6) | OPC_ABSQ_S_QH_DSP
,
750 OPC_PRECEU_QH_OBRA
= (0x1F << 6) | OPC_ABSQ_S_QH_DSP
,
751 OPC_ABSQ_S_OB
= (0x01 << 6) | OPC_ABSQ_S_QH_DSP
,
752 OPC_ABSQ_S_PW
= (0x11 << 6) | OPC_ABSQ_S_QH_DSP
,
753 OPC_ABSQ_S_QH
= (0x09 << 6) | OPC_ABSQ_S_QH_DSP
,
754 /* DSP Bit/Manipulation Sub-class */
755 OPC_REPL_OB
= (0x02 << 6) | OPC_ABSQ_S_QH_DSP
,
756 OPC_REPL_PW
= (0x12 << 6) | OPC_ABSQ_S_QH_DSP
,
757 OPC_REPL_QH
= (0x0A << 6) | OPC_ABSQ_S_QH_DSP
,
758 OPC_REPLV_OB
= (0x03 << 6) | OPC_ABSQ_S_QH_DSP
,
759 OPC_REPLV_PW
= (0x13 << 6) | OPC_ABSQ_S_QH_DSP
,
760 OPC_REPLV_QH
= (0x0B << 6) | OPC_ABSQ_S_QH_DSP
,
763 #define MASK_ADDU_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
765 /* MIPS DSP Multiply Sub-class insns */
766 OPC_MULEQ_S_PW_QHL
= (0x1C << 6) | OPC_ADDU_OB_DSP
,
767 OPC_MULEQ_S_PW_QHR
= (0x1D << 6) | OPC_ADDU_OB_DSP
,
768 OPC_MULEU_S_QH_OBL
= (0x06 << 6) | OPC_ADDU_OB_DSP
,
769 OPC_MULEU_S_QH_OBR
= (0x07 << 6) | OPC_ADDU_OB_DSP
,
770 OPC_MULQ_RS_QH
= (0x1F << 6) | OPC_ADDU_OB_DSP
,
771 /* MIPS DSP Arithmetic Sub-class */
772 OPC_RADDU_L_OB
= (0x14 << 6) | OPC_ADDU_OB_DSP
,
773 OPC_SUBQ_PW
= (0x13 << 6) | OPC_ADDU_OB_DSP
,
774 OPC_SUBQ_S_PW
= (0x17 << 6) | OPC_ADDU_OB_DSP
,
775 OPC_SUBQ_QH
= (0x0B << 6) | OPC_ADDU_OB_DSP
,
776 OPC_SUBQ_S_QH
= (0x0F << 6) | OPC_ADDU_OB_DSP
,
777 OPC_SUBU_OB
= (0x01 << 6) | OPC_ADDU_OB_DSP
,
778 OPC_SUBU_S_OB
= (0x05 << 6) | OPC_ADDU_OB_DSP
,
779 OPC_SUBU_QH
= (0x09 << 6) | OPC_ADDU_OB_DSP
,
780 OPC_SUBU_S_QH
= (0x0D << 6) | OPC_ADDU_OB_DSP
,
781 OPC_SUBUH_OB
= (0x19 << 6) | OPC_ADDU_OB_DSP
,
782 OPC_SUBUH_R_OB
= (0x1B << 6) | OPC_ADDU_OB_DSP
,
783 OPC_ADDQ_PW
= (0x12 << 6) | OPC_ADDU_OB_DSP
,
784 OPC_ADDQ_S_PW
= (0x16 << 6) | OPC_ADDU_OB_DSP
,
785 OPC_ADDQ_QH
= (0x0A << 6) | OPC_ADDU_OB_DSP
,
786 OPC_ADDQ_S_QH
= (0x0E << 6) | OPC_ADDU_OB_DSP
,
787 OPC_ADDU_OB
= (0x00 << 6) | OPC_ADDU_OB_DSP
,
788 OPC_ADDU_S_OB
= (0x04 << 6) | OPC_ADDU_OB_DSP
,
789 OPC_ADDU_QH
= (0x08 << 6) | OPC_ADDU_OB_DSP
,
790 OPC_ADDU_S_QH
= (0x0C << 6) | OPC_ADDU_OB_DSP
,
791 OPC_ADDUH_OB
= (0x18 << 6) | OPC_ADDU_OB_DSP
,
792 OPC_ADDUH_R_OB
= (0x1A << 6) | OPC_ADDU_OB_DSP
,
795 #define MASK_CMPU_EQ_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
797 /* DSP Compare-Pick Sub-class */
798 OPC_CMP_EQ_PW
= (0x10 << 6) | OPC_CMPU_EQ_OB_DSP
,
799 OPC_CMP_LT_PW
= (0x11 << 6) | OPC_CMPU_EQ_OB_DSP
,
800 OPC_CMP_LE_PW
= (0x12 << 6) | OPC_CMPU_EQ_OB_DSP
,
801 OPC_CMP_EQ_QH
= (0x08 << 6) | OPC_CMPU_EQ_OB_DSP
,
802 OPC_CMP_LT_QH
= (0x09 << 6) | OPC_CMPU_EQ_OB_DSP
,
803 OPC_CMP_LE_QH
= (0x0A << 6) | OPC_CMPU_EQ_OB_DSP
,
804 OPC_CMPGDU_EQ_OB
= (0x18 << 6) | OPC_CMPU_EQ_OB_DSP
,
805 OPC_CMPGDU_LT_OB
= (0x19 << 6) | OPC_CMPU_EQ_OB_DSP
,
806 OPC_CMPGDU_LE_OB
= (0x1A << 6) | OPC_CMPU_EQ_OB_DSP
,
807 OPC_CMPGU_EQ_OB
= (0x04 << 6) | OPC_CMPU_EQ_OB_DSP
,
808 OPC_CMPGU_LT_OB
= (0x05 << 6) | OPC_CMPU_EQ_OB_DSP
,
809 OPC_CMPGU_LE_OB
= (0x06 << 6) | OPC_CMPU_EQ_OB_DSP
,
810 OPC_CMPU_EQ_OB
= (0x00 << 6) | OPC_CMPU_EQ_OB_DSP
,
811 OPC_CMPU_LT_OB
= (0x01 << 6) | OPC_CMPU_EQ_OB_DSP
,
812 OPC_CMPU_LE_OB
= (0x02 << 6) | OPC_CMPU_EQ_OB_DSP
,
813 OPC_PACKRL_PW
= (0x0E << 6) | OPC_CMPU_EQ_OB_DSP
,
814 OPC_PICK_OB
= (0x03 << 6) | OPC_CMPU_EQ_OB_DSP
,
815 OPC_PICK_PW
= (0x13 << 6) | OPC_CMPU_EQ_OB_DSP
,
816 OPC_PICK_QH
= (0x0B << 6) | OPC_CMPU_EQ_OB_DSP
,
817 /* MIPS DSP Arithmetic Sub-class */
818 OPC_PRECR_OB_QH
= (0x0D << 6) | OPC_CMPU_EQ_OB_DSP
,
819 OPC_PRECR_SRA_QH_PW
= (0x1E << 6) | OPC_CMPU_EQ_OB_DSP
,
820 OPC_PRECR_SRA_R_QH_PW
= (0x1F << 6) | OPC_CMPU_EQ_OB_DSP
,
821 OPC_PRECRQ_OB_QH
= (0x0C << 6) | OPC_CMPU_EQ_OB_DSP
,
822 OPC_PRECRQ_PW_L
= (0x1C << 6) | OPC_CMPU_EQ_OB_DSP
,
823 OPC_PRECRQ_QH_PW
= (0x14 << 6) | OPC_CMPU_EQ_OB_DSP
,
824 OPC_PRECRQ_RS_QH_PW
= (0x15 << 6) | OPC_CMPU_EQ_OB_DSP
,
825 OPC_PRECRQU_S_OB_QH
= (0x0F << 6) | OPC_CMPU_EQ_OB_DSP
,
828 #define MASK_DAPPEND(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
830 /* DSP Append Sub-class */
831 OPC_DAPPEND
= (0x00 << 6) | OPC_DAPPEND_DSP
,
832 OPC_PREPENDD
= (0x03 << 6) | OPC_DAPPEND_DSP
,
833 OPC_PREPENDW
= (0x01 << 6) | OPC_DAPPEND_DSP
,
834 OPC_DBALIGN
= (0x10 << 6) | OPC_DAPPEND_DSP
,
837 #define MASK_DEXTR_W(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
839 /* MIPS DSP Accumulator and DSPControl Access Sub-class */
840 OPC_DMTHLIP
= (0x1F << 6) | OPC_DEXTR_W_DSP
,
841 OPC_DSHILO
= (0x1A << 6) | OPC_DEXTR_W_DSP
,
842 OPC_DEXTP
= (0x02 << 6) | OPC_DEXTR_W_DSP
,
843 OPC_DEXTPDP
= (0x0A << 6) | OPC_DEXTR_W_DSP
,
844 OPC_DEXTPDPV
= (0x0B << 6) | OPC_DEXTR_W_DSP
,
845 OPC_DEXTPV
= (0x03 << 6) | OPC_DEXTR_W_DSP
,
846 OPC_DEXTR_L
= (0x10 << 6) | OPC_DEXTR_W_DSP
,
847 OPC_DEXTR_R_L
= (0x14 << 6) | OPC_DEXTR_W_DSP
,
848 OPC_DEXTR_RS_L
= (0x16 << 6) | OPC_DEXTR_W_DSP
,
849 OPC_DEXTR_W
= (0x00 << 6) | OPC_DEXTR_W_DSP
,
850 OPC_DEXTR_R_W
= (0x04 << 6) | OPC_DEXTR_W_DSP
,
851 OPC_DEXTR_RS_W
= (0x06 << 6) | OPC_DEXTR_W_DSP
,
852 OPC_DEXTR_S_H
= (0x0E << 6) | OPC_DEXTR_W_DSP
,
853 OPC_DEXTRV_L
= (0x11 << 6) | OPC_DEXTR_W_DSP
,
854 OPC_DEXTRV_R_L
= (0x15 << 6) | OPC_DEXTR_W_DSP
,
855 OPC_DEXTRV_RS_L
= (0x17 << 6) | OPC_DEXTR_W_DSP
,
856 OPC_DEXTRV_S_H
= (0x0F << 6) | OPC_DEXTR_W_DSP
,
857 OPC_DEXTRV_W
= (0x01 << 6) | OPC_DEXTR_W_DSP
,
858 OPC_DEXTRV_R_W
= (0x05 << 6) | OPC_DEXTR_W_DSP
,
859 OPC_DEXTRV_RS_W
= (0x07 << 6) | OPC_DEXTR_W_DSP
,
860 OPC_DSHILOV
= (0x1B << 6) | OPC_DEXTR_W_DSP
,
863 #define MASK_DINSV(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
865 /* DSP Bit/Manipulation Sub-class */
866 OPC_DINSV
= (0x00 << 6) | OPC_DINSV_DSP
,
869 #define MASK_DPAQ_W_QH(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
871 /* MIPS DSP Multiply Sub-class insns */
872 OPC_DMADD
= (0x19 << 6) | OPC_DPAQ_W_QH_DSP
,
873 OPC_DMADDU
= (0x1D << 6) | OPC_DPAQ_W_QH_DSP
,
874 OPC_DMSUB
= (0x1B << 6) | OPC_DPAQ_W_QH_DSP
,
875 OPC_DMSUBU
= (0x1F << 6) | OPC_DPAQ_W_QH_DSP
,
876 OPC_DPA_W_QH
= (0x00 << 6) | OPC_DPAQ_W_QH_DSP
,
877 OPC_DPAQ_S_W_QH
= (0x04 << 6) | OPC_DPAQ_W_QH_DSP
,
878 OPC_DPAQ_SA_L_PW
= (0x0C << 6) | OPC_DPAQ_W_QH_DSP
,
879 OPC_DPAU_H_OBL
= (0x03 << 6) | OPC_DPAQ_W_QH_DSP
,
880 OPC_DPAU_H_OBR
= (0x07 << 6) | OPC_DPAQ_W_QH_DSP
,
881 OPC_DPS_W_QH
= (0x01 << 6) | OPC_DPAQ_W_QH_DSP
,
882 OPC_DPSQ_S_W_QH
= (0x05 << 6) | OPC_DPAQ_W_QH_DSP
,
883 OPC_DPSQ_SA_L_PW
= (0x0D << 6) | OPC_DPAQ_W_QH_DSP
,
884 OPC_DPSU_H_OBL
= (0x0B << 6) | OPC_DPAQ_W_QH_DSP
,
885 OPC_DPSU_H_OBR
= (0x0F << 6) | OPC_DPAQ_W_QH_DSP
,
886 OPC_MAQ_S_L_PWL
= (0x1C << 6) | OPC_DPAQ_W_QH_DSP
,
887 OPC_MAQ_S_L_PWR
= (0x1E << 6) | OPC_DPAQ_W_QH_DSP
,
888 OPC_MAQ_S_W_QHLL
= (0x14 << 6) | OPC_DPAQ_W_QH_DSP
,
889 OPC_MAQ_SA_W_QHLL
= (0x10 << 6) | OPC_DPAQ_W_QH_DSP
,
890 OPC_MAQ_S_W_QHLR
= (0x15 << 6) | OPC_DPAQ_W_QH_DSP
,
891 OPC_MAQ_SA_W_QHLR
= (0x11 << 6) | OPC_DPAQ_W_QH_DSP
,
892 OPC_MAQ_S_W_QHRL
= (0x16 << 6) | OPC_DPAQ_W_QH_DSP
,
893 OPC_MAQ_SA_W_QHRL
= (0x12 << 6) | OPC_DPAQ_W_QH_DSP
,
894 OPC_MAQ_S_W_QHRR
= (0x17 << 6) | OPC_DPAQ_W_QH_DSP
,
895 OPC_MAQ_SA_W_QHRR
= (0x13 << 6) | OPC_DPAQ_W_QH_DSP
,
896 OPC_MULSAQ_S_L_PW
= (0x0E << 6) | OPC_DPAQ_W_QH_DSP
,
897 OPC_MULSAQ_S_W_QH
= (0x06 << 6) | OPC_DPAQ_W_QH_DSP
,
900 #define MASK_SHLL_OB(op) (MASK_SPECIAL3(op) | (op & (0x1F << 6)))
902 /* MIPS DSP GPR-Based Shift Sub-class */
903 OPC_SHLL_PW
= (0x10 << 6) | OPC_SHLL_OB_DSP
,
904 OPC_SHLL_S_PW
= (0x14 << 6) | OPC_SHLL_OB_DSP
,
905 OPC_SHLLV_OB
= (0x02 << 6) | OPC_SHLL_OB_DSP
,
906 OPC_SHLLV_PW
= (0x12 << 6) | OPC_SHLL_OB_DSP
,
907 OPC_SHLLV_S_PW
= (0x16 << 6) | OPC_SHLL_OB_DSP
,
908 OPC_SHLLV_QH
= (0x0A << 6) | OPC_SHLL_OB_DSP
,
909 OPC_SHLLV_S_QH
= (0x0E << 6) | OPC_SHLL_OB_DSP
,
910 OPC_SHRA_PW
= (0x11 << 6) | OPC_SHLL_OB_DSP
,
911 OPC_SHRA_R_PW
= (0x15 << 6) | OPC_SHLL_OB_DSP
,
912 OPC_SHRAV_OB
= (0x06 << 6) | OPC_SHLL_OB_DSP
,
913 OPC_SHRAV_R_OB
= (0x07 << 6) | OPC_SHLL_OB_DSP
,
914 OPC_SHRAV_PW
= (0x13 << 6) | OPC_SHLL_OB_DSP
,
915 OPC_SHRAV_R_PW
= (0x17 << 6) | OPC_SHLL_OB_DSP
,
916 OPC_SHRAV_QH
= (0x0B << 6) | OPC_SHLL_OB_DSP
,
917 OPC_SHRAV_R_QH
= (0x0F << 6) | OPC_SHLL_OB_DSP
,
918 OPC_SHRLV_OB
= (0x03 << 6) | OPC_SHLL_OB_DSP
,
919 OPC_SHRLV_QH
= (0x1B << 6) | OPC_SHLL_OB_DSP
,
920 OPC_SHLL_OB
= (0x00 << 6) | OPC_SHLL_OB_DSP
,
921 OPC_SHLL_QH
= (0x08 << 6) | OPC_SHLL_OB_DSP
,
922 OPC_SHLL_S_QH
= (0x0C << 6) | OPC_SHLL_OB_DSP
,
923 OPC_SHRA_OB
= (0x04 << 6) | OPC_SHLL_OB_DSP
,
924 OPC_SHRA_R_OB
= (0x05 << 6) | OPC_SHLL_OB_DSP
,
925 OPC_SHRA_QH
= (0x09 << 6) | OPC_SHLL_OB_DSP
,
926 OPC_SHRA_R_QH
= (0x0D << 6) | OPC_SHLL_OB_DSP
,
927 OPC_SHRL_OB
= (0x01 << 6) | OPC_SHLL_OB_DSP
,
928 OPC_SHRL_QH
= (0x19 << 6) | OPC_SHLL_OB_DSP
,
931 /* Coprocessor 0 (rs field) */
932 #define MASK_CP0(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
935 OPC_MFC0
= (0x00 << 21) | OPC_CP0
,
936 OPC_DMFC0
= (0x01 << 21) | OPC_CP0
,
937 OPC_MFHC0
= (0x02 << 21) | OPC_CP0
,
938 OPC_MTC0
= (0x04 << 21) | OPC_CP0
,
939 OPC_DMTC0
= (0x05 << 21) | OPC_CP0
,
940 OPC_MTHC0
= (0x06 << 21) | OPC_CP0
,
941 OPC_MFTR
= (0x08 << 21) | OPC_CP0
,
942 OPC_RDPGPR
= (0x0A << 21) | OPC_CP0
,
943 OPC_MFMC0
= (0x0B << 21) | OPC_CP0
,
944 OPC_MTTR
= (0x0C << 21) | OPC_CP0
,
945 OPC_WRPGPR
= (0x0E << 21) | OPC_CP0
,
946 OPC_C0
= (0x10 << 21) | OPC_CP0
,
947 OPC_C0_1
= (0x11 << 21) | OPC_CP0
,
948 OPC_C0_2
= (0x12 << 21) | OPC_CP0
,
949 OPC_C0_3
= (0x13 << 21) | OPC_CP0
,
950 OPC_C0_4
= (0x14 << 21) | OPC_CP0
,
951 OPC_C0_5
= (0x15 << 21) | OPC_CP0
,
952 OPC_C0_6
= (0x16 << 21) | OPC_CP0
,
953 OPC_C0_7
= (0x17 << 21) | OPC_CP0
,
954 OPC_C0_8
= (0x18 << 21) | OPC_CP0
,
955 OPC_C0_9
= (0x19 << 21) | OPC_CP0
,
956 OPC_C0_A
= (0x1A << 21) | OPC_CP0
,
957 OPC_C0_B
= (0x1B << 21) | OPC_CP0
,
958 OPC_C0_C
= (0x1C << 21) | OPC_CP0
,
959 OPC_C0_D
= (0x1D << 21) | OPC_CP0
,
960 OPC_C0_E
= (0x1E << 21) | OPC_CP0
,
961 OPC_C0_F
= (0x1F << 21) | OPC_CP0
,
965 #define MASK_MFMC0(op) (MASK_CP0(op) | (op & 0xFFFF))
968 OPC_DMT
= 0x01 | (0 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
969 OPC_EMT
= 0x01 | (1 << 5) | (0x0F << 6) | (0x01 << 11) | OPC_MFMC0
,
970 OPC_DVPE
= 0x01 | (0 << 5) | OPC_MFMC0
,
971 OPC_EVPE
= 0x01 | (1 << 5) | OPC_MFMC0
,
972 OPC_DI
= (0 << 5) | (0x0C << 11) | OPC_MFMC0
,
973 OPC_EI
= (1 << 5) | (0x0C << 11) | OPC_MFMC0
,
974 OPC_DVP
= 0x04 | (0 << 3) | (1 << 5) | (0 << 11) | OPC_MFMC0
,
975 OPC_EVP
= 0x04 | (0 << 3) | (0 << 5) | (0 << 11) | OPC_MFMC0
,
978 /* Coprocessor 0 (with rs == C0) */
979 #define MASK_C0(op) (MASK_CP0(op) | (op & 0x3F))
982 OPC_TLBR
= 0x01 | OPC_C0
,
983 OPC_TLBWI
= 0x02 | OPC_C0
,
984 OPC_TLBINV
= 0x03 | OPC_C0
,
985 OPC_TLBINVF
= 0x04 | OPC_C0
,
986 OPC_TLBWR
= 0x06 | OPC_C0
,
987 OPC_TLBP
= 0x08 | OPC_C0
,
988 OPC_RFE
= 0x10 | OPC_C0
,
989 OPC_ERET
= 0x18 | OPC_C0
,
990 OPC_DERET
= 0x1F | OPC_C0
,
991 OPC_WAIT
= 0x20 | OPC_C0
,
994 #define MASK_CP2(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)))
997 OPC_MFC2
= (0x00 << 21) | OPC_CP2
,
998 OPC_DMFC2
= (0x01 << 21) | OPC_CP2
,
999 OPC_CFC2
= (0x02 << 21) | OPC_CP2
,
1000 OPC_MFHC2
= (0x03 << 21) | OPC_CP2
,
1001 OPC_MTC2
= (0x04 << 21) | OPC_CP2
,
1002 OPC_DMTC2
= (0x05 << 21) | OPC_CP2
,
1003 OPC_CTC2
= (0x06 << 21) | OPC_CP2
,
1004 OPC_MTHC2
= (0x07 << 21) | OPC_CP2
,
1005 OPC_BC2
= (0x08 << 21) | OPC_CP2
,
1006 OPC_BC2EQZ
= (0x09 << 21) | OPC_CP2
,
1007 OPC_BC2NEZ
= (0x0D << 21) | OPC_CP2
,
1010 #define MASK_LMMI(op) (MASK_OP_MAJOR(op) | (op & (0x1F << 21)) | (op & 0x1F))
1013 OPC_PADDSH
= (24 << 21) | (0x00) | OPC_CP2
,
1014 OPC_PADDUSH
= (25 << 21) | (0x00) | OPC_CP2
,
1015 OPC_PADDH
= (26 << 21) | (0x00) | OPC_CP2
,
1016 OPC_PADDW
= (27 << 21) | (0x00) | OPC_CP2
,
1017 OPC_PADDSB
= (28 << 21) | (0x00) | OPC_CP2
,
1018 OPC_PADDUSB
= (29 << 21) | (0x00) | OPC_CP2
,
1019 OPC_PADDB
= (30 << 21) | (0x00) | OPC_CP2
,
1020 OPC_PADDD
= (31 << 21) | (0x00) | OPC_CP2
,
1022 OPC_PSUBSH
= (24 << 21) | (0x01) | OPC_CP2
,
1023 OPC_PSUBUSH
= (25 << 21) | (0x01) | OPC_CP2
,
1024 OPC_PSUBH
= (26 << 21) | (0x01) | OPC_CP2
,
1025 OPC_PSUBW
= (27 << 21) | (0x01) | OPC_CP2
,
1026 OPC_PSUBSB
= (28 << 21) | (0x01) | OPC_CP2
,
1027 OPC_PSUBUSB
= (29 << 21) | (0x01) | OPC_CP2
,
1028 OPC_PSUBB
= (30 << 21) | (0x01) | OPC_CP2
,
1029 OPC_PSUBD
= (31 << 21) | (0x01) | OPC_CP2
,
1031 OPC_PSHUFH
= (24 << 21) | (0x02) | OPC_CP2
,
1032 OPC_PACKSSWH
= (25 << 21) | (0x02) | OPC_CP2
,
1033 OPC_PACKSSHB
= (26 << 21) | (0x02) | OPC_CP2
,
1034 OPC_PACKUSHB
= (27 << 21) | (0x02) | OPC_CP2
,
1035 OPC_XOR_CP2
= (28 << 21) | (0x02) | OPC_CP2
,
1036 OPC_NOR_CP2
= (29 << 21) | (0x02) | OPC_CP2
,
1037 OPC_AND_CP2
= (30 << 21) | (0x02) | OPC_CP2
,
1038 OPC_PANDN
= (31 << 21) | (0x02) | OPC_CP2
,
1040 OPC_PUNPCKLHW
= (24 << 21) | (0x03) | OPC_CP2
,
1041 OPC_PUNPCKHHW
= (25 << 21) | (0x03) | OPC_CP2
,
1042 OPC_PUNPCKLBH
= (26 << 21) | (0x03) | OPC_CP2
,
1043 OPC_PUNPCKHBH
= (27 << 21) | (0x03) | OPC_CP2
,
1044 OPC_PINSRH_0
= (28 << 21) | (0x03) | OPC_CP2
,
1045 OPC_PINSRH_1
= (29 << 21) | (0x03) | OPC_CP2
,
1046 OPC_PINSRH_2
= (30 << 21) | (0x03) | OPC_CP2
,
1047 OPC_PINSRH_3
= (31 << 21) | (0x03) | OPC_CP2
,
1049 OPC_PAVGH
= (24 << 21) | (0x08) | OPC_CP2
,
1050 OPC_PAVGB
= (25 << 21) | (0x08) | OPC_CP2
,
1051 OPC_PMAXSH
= (26 << 21) | (0x08) | OPC_CP2
,
1052 OPC_PMINSH
= (27 << 21) | (0x08) | OPC_CP2
,
1053 OPC_PMAXUB
= (28 << 21) | (0x08) | OPC_CP2
,
1054 OPC_PMINUB
= (29 << 21) | (0x08) | OPC_CP2
,
1056 OPC_PCMPEQW
= (24 << 21) | (0x09) | OPC_CP2
,
1057 OPC_PCMPGTW
= (25 << 21) | (0x09) | OPC_CP2
,
1058 OPC_PCMPEQH
= (26 << 21) | (0x09) | OPC_CP2
,
1059 OPC_PCMPGTH
= (27 << 21) | (0x09) | OPC_CP2
,
1060 OPC_PCMPEQB
= (28 << 21) | (0x09) | OPC_CP2
,
1061 OPC_PCMPGTB
= (29 << 21) | (0x09) | OPC_CP2
,
1063 OPC_PSLLW
= (24 << 21) | (0x0A) | OPC_CP2
,
1064 OPC_PSLLH
= (25 << 21) | (0x0A) | OPC_CP2
,
1065 OPC_PMULLH
= (26 << 21) | (0x0A) | OPC_CP2
,
1066 OPC_PMULHH
= (27 << 21) | (0x0A) | OPC_CP2
,
1067 OPC_PMULUW
= (28 << 21) | (0x0A) | OPC_CP2
,
1068 OPC_PMULHUH
= (29 << 21) | (0x0A) | OPC_CP2
,
1070 OPC_PSRLW
= (24 << 21) | (0x0B) | OPC_CP2
,
1071 OPC_PSRLH
= (25 << 21) | (0x0B) | OPC_CP2
,
1072 OPC_PSRAW
= (26 << 21) | (0x0B) | OPC_CP2
,
1073 OPC_PSRAH
= (27 << 21) | (0x0B) | OPC_CP2
,
1074 OPC_PUNPCKLWD
= (28 << 21) | (0x0B) | OPC_CP2
,
1075 OPC_PUNPCKHWD
= (29 << 21) | (0x0B) | OPC_CP2
,
1077 OPC_ADDU_CP2
= (24 << 21) | (0x0C) | OPC_CP2
,
1078 OPC_OR_CP2
= (25 << 21) | (0x0C) | OPC_CP2
,
1079 OPC_ADD_CP2
= (26 << 21) | (0x0C) | OPC_CP2
,
1080 OPC_DADD_CP2
= (27 << 21) | (0x0C) | OPC_CP2
,
1081 OPC_SEQU_CP2
= (28 << 21) | (0x0C) | OPC_CP2
,
1082 OPC_SEQ_CP2
= (29 << 21) | (0x0C) | OPC_CP2
,
1084 OPC_SUBU_CP2
= (24 << 21) | (0x0D) | OPC_CP2
,
1085 OPC_PASUBUB
= (25 << 21) | (0x0D) | OPC_CP2
,
1086 OPC_SUB_CP2
= (26 << 21) | (0x0D) | OPC_CP2
,
1087 OPC_DSUB_CP2
= (27 << 21) | (0x0D) | OPC_CP2
,
1088 OPC_SLTU_CP2
= (28 << 21) | (0x0D) | OPC_CP2
,
1089 OPC_SLT_CP2
= (29 << 21) | (0x0D) | OPC_CP2
,
1091 OPC_SLL_CP2
= (24 << 21) | (0x0E) | OPC_CP2
,
1092 OPC_DSLL_CP2
= (25 << 21) | (0x0E) | OPC_CP2
,
1093 OPC_PEXTRH
= (26 << 21) | (0x0E) | OPC_CP2
,
1094 OPC_PMADDHW
= (27 << 21) | (0x0E) | OPC_CP2
,
1095 OPC_SLEU_CP2
= (28 << 21) | (0x0E) | OPC_CP2
,
1096 OPC_SLE_CP2
= (29 << 21) | (0x0E) | OPC_CP2
,
1098 OPC_SRL_CP2
= (24 << 21) | (0x0F) | OPC_CP2
,
1099 OPC_DSRL_CP2
= (25 << 21) | (0x0F) | OPC_CP2
,
1100 OPC_SRA_CP2
= (26 << 21) | (0x0F) | OPC_CP2
,
1101 OPC_DSRA_CP2
= (27 << 21) | (0x0F) | OPC_CP2
,
1102 OPC_BIADD
= (28 << 21) | (0x0F) | OPC_CP2
,
1103 OPC_PMOVMSKB
= (29 << 21) | (0x0F) | OPC_CP2
,
1107 #define MASK_CP3(op) (MASK_OP_MAJOR(op) | (op & 0x3F))
1110 OPC_LWXC1
= 0x00 | OPC_CP3
,
1111 OPC_LDXC1
= 0x01 | OPC_CP3
,
1112 OPC_LUXC1
= 0x05 | OPC_CP3
,
1113 OPC_SWXC1
= 0x08 | OPC_CP3
,
1114 OPC_SDXC1
= 0x09 | OPC_CP3
,
1115 OPC_SUXC1
= 0x0D | OPC_CP3
,
1116 OPC_PREFX
= 0x0F | OPC_CP3
,
1117 OPC_ALNV_PS
= 0x1E | OPC_CP3
,
1118 OPC_MADD_S
= 0x20 | OPC_CP3
,
1119 OPC_MADD_D
= 0x21 | OPC_CP3
,
1120 OPC_MADD_PS
= 0x26 | OPC_CP3
,
1121 OPC_MSUB_S
= 0x28 | OPC_CP3
,
1122 OPC_MSUB_D
= 0x29 | OPC_CP3
,
1123 OPC_MSUB_PS
= 0x2E | OPC_CP3
,
1124 OPC_NMADD_S
= 0x30 | OPC_CP3
,
1125 OPC_NMADD_D
= 0x31 | OPC_CP3
,
1126 OPC_NMADD_PS
= 0x36 | OPC_CP3
,
1127 OPC_NMSUB_S
= 0x38 | OPC_CP3
,
1128 OPC_NMSUB_D
= 0x39 | OPC_CP3
,
1129 OPC_NMSUB_PS
= 0x3E | OPC_CP3
,
1134 * AN OVERVIEW OF MXU EXTENSION INSTRUCTION SET
1135 * ============================================
1138 * MXU (full name: MIPS eXtension/enhanced Unit) is a SIMD extension of MIPS32
1139 * instructions set. It is designed to fit the needs of signal, graphical and
1140 * video processing applications. MXU instruction set is used in Xburst family
1141 * of microprocessors by Ingenic.
1143 * MXU unit contains 17 registers called X0-X16. X0 is always zero, and X16 is
1144 * the control register.
1147 * The notation used in MXU assembler mnemonics
1148 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1150 * Register operands:
1152 * XRa, XRb, XRc, XRd - MXU registers
1153 * Rb, Rc, Rd, Rs, Rt - general purpose MIPS registers
1155 * Non-register operands:
1157 * aptn1 - 1-bit accumulate add/subtract pattern
1158 * aptn2 - 2-bit accumulate add/subtract pattern
1159 * eptn2 - 2-bit execute add/subtract pattern
1160 * optn2 - 2-bit operand pattern
1161 * optn3 - 3-bit operand pattern
1162 * sft4 - 4-bit shift amount
1163 * strd2 - 2-bit stride amount
1167 * Level of parallelism: Operand size:
1168 * S - single operation at a time 32 - word
1169 * D - two operations in parallel 16 - half word
1170 * Q - four operations in parallel 8 - byte
1174 * ADD - Add or subtract
1175 * ADDC - Add with carry-in
1177 * ASUM - Sum together then accumulate (add or subtract)
1178 * ASUMC - Sum together then accumulate (add or subtract) with carry-in
1179 * AVG - Average between 2 operands
1180 * ABD - Absolute difference
1182 * AND - Logical bitwise 'and' operation
1184 * EXTR - Extract bits
1185 * I2M - Move from GPR register to MXU register
1186 * LDD - Load data from memory to XRF
1187 * LDI - Load data from memory to XRF (and increase the address base)
1188 * LUI - Load unsigned immediate
1190 * MULU - Unsigned multiply
1191 * MADD - 64-bit operand add 32x32 product
1192 * MSUB - 64-bit operand subtract 32x32 product
1193 * MAC - Multiply and accumulate (add or subtract)
1194 * MAD - Multiply and add or subtract
1195 * MAX - Maximum between 2 operands
1196 * MIN - Minimum between 2 operands
1197 * M2I - Move from MXU register to GPR register
1198 * MOVZ - Move if zero
1199 * MOVN - Move if non-zero
1200 * NOR - Logical bitwise 'nor' operation
1201 * OR - Logical bitwise 'or' operation
1202 * STD - Store data from XRF to memory
1203 * SDI - Store data from XRF to memory (and increase the address base)
1204 * SLT - Set of less than comparison
1205 * SAD - Sum of absolute differences
1206 * SLL - Logical shift left
1207 * SLR - Logical shift right
1208 * SAR - Arithmetic shift right
1211 * SCOP - Calculate x’s scope (-1, means x<0; 0, means x==0; 1, means x>0)
1212 * XOR - Logical bitwise 'exclusive or' operation
1216 * E - Expand results
1217 * F - Fixed point multiplication
1218 * L - Low part result
1219 * R - Doing rounding
1220 * V - Variable instead of immediate
1221 * W - Combine above L and V
1224 * The list of MXU instructions grouped by functionality
1225 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1227 * Load/Store instructions Multiplication instructions
1228 * ----------------------- ---------------------------
1230 * S32LDD XRa, Rb, s12 S32MADD XRa, XRd, Rs, Rt
1231 * S32STD XRa, Rb, s12 S32MADDU XRa, XRd, Rs, Rt
1232 * S32LDDV XRa, Rb, rc, strd2 S32MSUB XRa, XRd, Rs, Rt
1233 * S32STDV XRa, Rb, rc, strd2 S32MSUBU XRa, XRd, Rs, Rt
1234 * S32LDI XRa, Rb, s12 S32MUL XRa, XRd, Rs, Rt
1235 * S32SDI XRa, Rb, s12 S32MULU XRa, XRd, Rs, Rt
1236 * S32LDIV XRa, Rb, rc, strd2 D16MUL XRa, XRb, XRc, XRd, optn2
1237 * S32SDIV XRa, Rb, rc, strd2 D16MULE XRa, XRb, XRc, optn2
1238 * S32LDDR XRa, Rb, s12 D16MULF XRa, XRb, XRc, optn2
1239 * S32STDR XRa, Rb, s12 D16MAC XRa, XRb, XRc, XRd, aptn2, optn2
1240 * S32LDDVR XRa, Rb, rc, strd2 D16MACE XRa, XRb, XRc, XRd, aptn2, optn2
1241 * S32STDVR XRa, Rb, rc, strd2 D16MACF XRa, XRb, XRc, XRd, aptn2, optn2
1242 * S32LDIR XRa, Rb, s12 D16MADL XRa, XRb, XRc, XRd, aptn2, optn2
1243 * S32SDIR XRa, Rb, s12 S16MAD XRa, XRb, XRc, XRd, aptn1, optn2
1244 * S32LDIVR XRa, Rb, rc, strd2 Q8MUL XRa, XRb, XRc, XRd
1245 * S32SDIVR XRa, Rb, rc, strd2 Q8MULSU XRa, XRb, XRc, XRd
1246 * S16LDD XRa, Rb, s10, eptn2 Q8MAC XRa, XRb, XRc, XRd, aptn2
1247 * S16STD XRa, Rb, s10, eptn2 Q8MACSU XRa, XRb, XRc, XRd, aptn2
1248 * S16LDI XRa, Rb, s10, eptn2 Q8MADL XRa, XRb, XRc, XRd, aptn2
1249 * S16SDI XRa, Rb, s10, eptn2
1250 * S8LDD XRa, Rb, s8, eptn3
1251 * S8STD XRa, Rb, s8, eptn3 Addition and subtraction instructions
1252 * S8LDI XRa, Rb, s8, eptn3 -------------------------------------
1253 * S8SDI XRa, Rb, s8, eptn3
1254 * LXW Rd, Rs, Rt, strd2 D32ADD XRa, XRb, XRc, XRd, eptn2
1255 * LXH Rd, Rs, Rt, strd2 D32ADDC XRa, XRb, XRc, XRd
1256 * LXHU Rd, Rs, Rt, strd2 D32ACC XRa, XRb, XRc, XRd, eptn2
1257 * LXB Rd, Rs, Rt, strd2 D32ACCM XRa, XRb, XRc, XRd, eptn2
1258 * LXBU Rd, Rs, Rt, strd2 D32ASUM XRa, XRb, XRc, XRd, eptn2
1259 * S32CPS XRa, XRb, XRc
1260 * Q16ADD XRa, XRb, XRc, XRd, eptn2, optn2
1261 * Comparison instructions Q16ACC XRa, XRb, XRc, XRd, eptn2
1262 * ----------------------- Q16ACCM XRa, XRb, XRc, XRd, eptn2
1263 * D16ASUM XRa, XRb, XRc, XRd, eptn2
1264 * S32MAX XRa, XRb, XRc D16CPS XRa, XRb,
1265 * S32MIN XRa, XRb, XRc D16AVG XRa, XRb, XRc
1266 * S32SLT XRa, XRb, XRc D16AVGR XRa, XRb, XRc
1267 * S32MOVZ XRa, XRb, XRc Q8ADD XRa, XRb, XRc, eptn2
1268 * S32MOVN XRa, XRb, XRc Q8ADDE XRa, XRb, XRc, XRd, eptn2
1269 * D16MAX XRa, XRb, XRc Q8ACCE XRa, XRb, XRc, XRd, eptn2
1270 * D16MIN XRa, XRb, XRc Q8ABD XRa, XRb, XRc
1271 * D16SLT XRa, XRb, XRc Q8SAD XRa, XRb, XRc, XRd
1272 * D16MOVZ XRa, XRb, XRc Q8AVG XRa, XRb, XRc
1273 * D16MOVN XRa, XRb, XRc Q8AVGR XRa, XRb, XRc
1274 * Q8MAX XRa, XRb, XRc D8SUM XRa, XRb, XRc, XRd
1275 * Q8MIN XRa, XRb, XRc D8SUMC XRa, XRb, XRc, XRd
1276 * Q8SLT XRa, XRb, XRc
1277 * Q8SLTU XRa, XRb, XRc
1278 * Q8MOVZ XRa, XRb, XRc Shift instructions
1279 * Q8MOVN XRa, XRb, XRc ------------------
1281 * D32SLL XRa, XRb, XRc, XRd, sft4
1282 * Bitwise instructions D32SLR XRa, XRb, XRc, XRd, sft4
1283 * -------------------- D32SAR XRa, XRb, XRc, XRd, sft4
1284 * D32SARL XRa, XRb, XRc, sft4
1285 * S32NOR XRa, XRb, XRc D32SLLV XRa, XRb, Rb
1286 * S32AND XRa, XRb, XRc D32SLRV XRa, XRb, Rb
1287 * S32XOR XRa, XRb, XRc D32SARV XRa, XRb, Rb
1288 * S32OR XRa, XRb, XRc D32SARW XRa, XRb, XRc, Rb
1289 * Q16SLL XRa, XRb, XRc, XRd, sft4
1290 * Q16SLR XRa, XRb, XRc, XRd, sft4
1291 * Miscellaneous instructions Q16SAR XRa, XRb, XRc, XRd, sft4
1292 * ------------------------- Q16SLLV XRa, XRb, Rb
1293 * Q16SLRV XRa, XRb, Rb
1294 * S32SFL XRa, XRb, XRc, XRd, optn2 Q16SARV XRa, XRb, Rb
1295 * S32ALN XRa, XRb, XRc, Rb
1296 * S32ALNI XRa, XRb, XRc, s3
1297 * S32LUI XRa, s8, optn3 Move instructions
1298 * S32EXTR XRa, XRb, Rb, bits5 -----------------
1299 * S32EXTRV XRa, XRb, Rs, Rt
1300 * Q16SCOP XRa, XRb, XRc, XRd S32M2I XRa, Rb
1301 * Q16SAT XRa, XRb, XRc S32I2M XRa, Rb
1304 * The opcode organization of MXU instructions
1305 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1307 * The bits 31..26 of all MXU instructions are equal to 0x1C (also referred
1308 * as opcode SPECIAL2 in the base MIPS ISA). The organization and meaning of
1309 * other bits up to the instruction level is as follows:
1314 * ┌─ 000000 ─ OPC_MXU_S32MADD
1315 * ├─ 000001 ─ OPC_MXU_S32MADDU
1316 * ├─ 000010 ─ <not assigned> (non-MXU OPC_MUL)
1319 * ├─ 000011 ─ OPC_MXU__POOL00 ─┬─ 000 ─ OPC_MXU_S32MAX
1320 * │ ├─ 001 ─ OPC_MXU_S32MIN
1321 * │ ├─ 010 ─ OPC_MXU_D16MAX
1322 * │ ├─ 011 ─ OPC_MXU_D16MIN
1323 * │ ├─ 100 ─ OPC_MXU_Q8MAX
1324 * │ ├─ 101 ─ OPC_MXU_Q8MIN
1325 * │ ├─ 110 ─ OPC_MXU_Q8SLT
1326 * │ └─ 111 ─ OPC_MXU_Q8SLTU
1327 * ├─ 000100 ─ OPC_MXU_S32MSUB
1328 * ├─ 000101 ─ OPC_MXU_S32MSUBU 20..18
1329 * ├─ 000110 ─ OPC_MXU__POOL01 ─┬─ 000 ─ OPC_MXU_S32SLT
1330 * │ ├─ 001 ─ OPC_MXU_D16SLT
1331 * │ ├─ 010 ─ OPC_MXU_D16AVG
1332 * │ ├─ 011 ─ OPC_MXU_D16AVGR
1333 * │ ├─ 100 ─ OPC_MXU_Q8AVG
1334 * │ ├─ 101 ─ OPC_MXU_Q8AVGR
1335 * │ └─ 111 ─ OPC_MXU_Q8ADD
1338 * ├─ 000111 ─ OPC_MXU__POOL02 ─┬─ 000 ─ OPC_MXU_S32CPS
1339 * │ ├─ 010 ─ OPC_MXU_D16CPS
1340 * │ ├─ 100 ─ OPC_MXU_Q8ABD
1341 * │ └─ 110 ─ OPC_MXU_Q16SAT
1342 * ├─ 001000 ─ OPC_MXU_D16MUL
1344 * ├─ 001001 ─ OPC_MXU__POOL03 ─┬─ 00 ─ OPC_MXU_D16MULF
1345 * │ └─ 01 ─ OPC_MXU_D16MULE
1346 * ├─ 001010 ─ OPC_MXU_D16MAC
1347 * ├─ 001011 ─ OPC_MXU_D16MACF
1348 * ├─ 001100 ─ OPC_MXU_D16MADL
1349 * ├─ 001101 ─ OPC_MXU_S16MAD
1350 * ├─ 001110 ─ OPC_MXU_Q16ADD
1351 * ├─ 001111 ─ OPC_MXU_D16MACE 23
1352 * │ ┌─ 0 ─ OPC_MXU_S32LDD
1353 * ├─ 010000 ─ OPC_MXU__POOL04 ─┴─ 1 ─ OPC_MXU_S32LDDR
1356 * ├─ 010001 ─ OPC_MXU__POOL05 ─┬─ 0 ─ OPC_MXU_S32STD
1357 * │ └─ 1 ─ OPC_MXU_S32STDR
1360 * ├─ 010010 ─ OPC_MXU__POOL06 ─┬─ 0000 ─ OPC_MXU_S32LDDV
1361 * │ └─ 0001 ─ OPC_MXU_S32LDDVR
1364 * ├─ 010011 ─ OPC_MXU__POOL07 ─┬─ 0000 ─ OPC_MXU_S32STDV
1365 * │ └─ 0001 ─ OPC_MXU_S32STDVR
1368 * ├─ 010100 ─ OPC_MXU__POOL08 ─┬─ 0 ─ OPC_MXU_S32LDI
1369 * │ └─ 1 ─ OPC_MXU_S32LDIR
1372 * ├─ 010101 ─ OPC_MXU__POOL09 ─┬─ 0 ─ OPC_MXU_S32SDI
1373 * │ └─ 1 ─ OPC_MXU_S32SDIR
1376 * ├─ 010110 ─ OPC_MXU__POOL10 ─┬─ 0000 ─ OPC_MXU_S32LDIV
1377 * │ └─ 0001 ─ OPC_MXU_S32LDIVR
1380 * ├─ 010111 ─ OPC_MXU__POOL11 ─┬─ 0000 ─ OPC_MXU_S32SDIV
1381 * │ └─ 0001 ─ OPC_MXU_S32SDIVR
1382 * ├─ 011000 ─ OPC_MXU_D32ADD
1384 * MXU ├─ 011001 ─ OPC_MXU__POOL12 ─┬─ 00 ─ OPC_MXU_D32ACC
1385 * opcodes ─┤ ├─ 01 ─ OPC_MXU_D32ACCM
1386 * │ └─ 10 ─ OPC_MXU_D32ASUM
1387 * ├─ 011010 ─ <not assigned>
1389 * ├─ 011011 ─ OPC_MXU__POOL13 ─┬─ 00 ─ OPC_MXU_Q16ACC
1390 * │ ├─ 01 ─ OPC_MXU_Q16ACCM
1391 * │ └─ 10 ─ OPC_MXU_Q16ASUM
1394 * ├─ 011100 ─ OPC_MXU__POOL14 ─┬─ 00 ─ OPC_MXU_Q8ADDE
1395 * │ ├─ 01 ─ OPC_MXU_D8SUM
1396 * ├─ 011101 ─ OPC_MXU_Q8ACCE └─ 10 ─ OPC_MXU_D8SUMC
1397 * ├─ 011110 ─ <not assigned>
1398 * ├─ 011111 ─ <not assigned>
1399 * ├─ 100000 ─ <not assigned> (overlaps with CLZ)
1400 * ├─ 100001 ─ <not assigned> (overlaps with CLO)
1401 * ├─ 100010 ─ OPC_MXU_S8LDD
1402 * ├─ 100011 ─ OPC_MXU_S8STD 15..14
1403 * ├─ 100100 ─ OPC_MXU_S8LDI ┌─ 00 ─ OPC_MXU_S32MUL
1404 * ├─ 100101 ─ OPC_MXU_S8SDI ├─ 00 ─ OPC_MXU_S32MULU
1405 * │ ├─ 00 ─ OPC_MXU_S32EXTR
1406 * ├─ 100110 ─ OPC_MXU__POOL15 ─┴─ 00 ─ OPC_MXU_S32EXTRV
1409 * ├─ 100111 ─ OPC_MXU__POOL16 ─┬─ 000 ─ OPC_MXU_D32SARW
1410 * │ ├─ 001 ─ OPC_MXU_S32ALN
1411 * │ ├─ 010 ─ OPC_MXU_S32ALNI
1412 * │ ├─ 011 ─ OPC_MXU_S32LUI
1413 * │ ├─ 100 ─ OPC_MXU_S32NOR
1414 * │ ├─ 101 ─ OPC_MXU_S32AND
1415 * │ ├─ 110 ─ OPC_MXU_S32OR
1416 * │ └─ 111 ─ OPC_MXU_S32XOR
1419 * ├─ 101000 ─ OPC_MXU__POOL17 ─┬─ 000 ─ OPC_MXU_LXB
1420 * │ ├─ 001 ─ OPC_MXU_LXH
1421 * ├─ 101001 ─ <not assigned> ├─ 011 ─ OPC_MXU_LXW
1422 * ├─ 101010 ─ OPC_MXU_S16LDD ├─ 100 ─ OPC_MXU_LXBU
1423 * ├─ 101011 ─ OPC_MXU_S16STD └─ 101 ─ OPC_MXU_LXHU
1424 * ├─ 101100 ─ OPC_MXU_S16LDI
1425 * ├─ 101101 ─ OPC_MXU_S16SDI
1426 * ├─ 101110 ─ OPC_MXU_S32M2I
1427 * ├─ 101111 ─ OPC_MXU_S32I2M
1428 * ├─ 110000 ─ OPC_MXU_D32SLL
1429 * ├─ 110001 ─ OPC_MXU_D32SLR 20..18
1430 * ├─ 110010 ─ OPC_MXU_D32SARL ┌─ 000 ─ OPC_MXU_D32SLLV
1431 * ├─ 110011 ─ OPC_MXU_D32SAR ├─ 001 ─ OPC_MXU_D32SLRV
1432 * ├─ 110100 ─ OPC_MXU_Q16SLL ├─ 010 ─ OPC_MXU_D32SARV
1433 * ├─ 110101 ─ OPC_MXU_Q16SLR ├─ 011 ─ OPC_MXU_Q16SLLV
1434 * │ ├─ 100 ─ OPC_MXU_Q16SLRV
1435 * ├─ 110110 ─ OPC_MXU__POOL18 ─┴─ 101 ─ OPC_MXU_Q16SARV
1437 * ├─ 110111 ─ OPC_MXU_Q16SAR
1439 * ├─ 111000 ─ OPC_MXU__POOL19 ─┬─ 00 ─ OPC_MXU_Q8MUL
1440 * │ └─ 01 ─ OPC_MXU_Q8MULSU
1443 * ├─ 111001 ─ OPC_MXU__POOL20 ─┬─ 000 ─ OPC_MXU_Q8MOVZ
1444 * │ ├─ 001 ─ OPC_MXU_Q8MOVN
1445 * │ ├─ 010 ─ OPC_MXU_D16MOVZ
1446 * │ ├─ 011 ─ OPC_MXU_D16MOVN
1447 * │ ├─ 100 ─ OPC_MXU_S32MOVZ
1448 * │ └─ 101 ─ OPC_MXU_S32MOVN
1451 * ├─ 111010 ─ OPC_MXU__POOL21 ─┬─ 00 ─ OPC_MXU_Q8MAC
1452 * │ └─ 10 ─ OPC_MXU_Q8MACSU
1453 * ├─ 111011 ─ OPC_MXU_Q16SCOP
1454 * ├─ 111100 ─ OPC_MXU_Q8MADL
1455 * ├─ 111101 ─ OPC_MXU_S32SFL
1456 * ├─ 111110 ─ OPC_MXU_Q8SAD
1457 * └─ 111111 ─ <not assigned> (overlaps with SDBBP)
1462 * "XBurst® Instruction Set Architecture MIPS eXtension/enhanced Unit
1463 * Programming Manual", Ingenic Semiconductor Co, Ltd., revision June 2, 2017
/*
 * MXU major opcodes (bits 5..0 of an MXU/SPECIAL2-encoded instruction);
 * see the opcode organization chart above.
 */
enum {
    OPC_MXU__POOL00 = 0x03,
    OPC_MXU_D16MUL  = 0x08,
    OPC_MXU_D16MAC  = 0x0A,
    OPC_MXU__POOL04 = 0x10,
    OPC_MXU_S8LDD   = 0x22,
    OPC_MXU__POOL16 = 0x27,
    OPC_MXU_S32M2I  = 0x2E,
    OPC_MXU_S32I2M  = 0x2F,
    OPC_MXU__POOL19 = 0x38,
};
/* MXU pool 00 minor opcodes */
enum {
    OPC_MXU_S32MAX = 0x00,
    OPC_MXU_S32MIN = 0x01,
    OPC_MXU_D16MAX = 0x02,
    OPC_MXU_D16MIN = 0x03,
    OPC_MXU_Q8MAX  = 0x04,
    OPC_MXU_Q8MIN  = 0x05,
};
/* MXU pool 04 minor opcodes */
enum {
    OPC_MXU_S32LDD  = 0x00,
    OPC_MXU_S32LDDR = 0x01,
};
/* MXU pool 16 minor opcodes */
enum {
    OPC_MXU_S32ALNI = 0x02,
    OPC_MXU_S32NOR  = 0x04,
    OPC_MXU_S32AND  = 0x05,
    OPC_MXU_S32OR   = 0x06,
    OPC_MXU_S32XOR  = 0x07,
};
/* MXU pool 19 minor opcodes */
enum {
    OPC_MXU_Q8MUL   = 0x00,
    OPC_MXU_Q8MULSU = 0x01,
};
1519 * Overview of the TX79-specific instruction set
1520 * =============================================
1522 * The R5900 and the C790 have 128-bit wide GPRs, where the upper 64 bits
1523 * are only used by the specific quadword (128-bit) LQ/SQ load/store
1524 * instructions and certain multimedia instructions (MMIs). These MMIs
1525 * configure the 128-bit data path as two 64-bit, four 32-bit, eight 16-bit
1526 * or sixteen 8-bit paths.
1530 * The Toshiba TX System RISC TX79 Core Architecture manual,
1531 * https://wiki.qemu.org/File:C790.pdf
1533 * Three-Operand Multiply and Multiply-Add (4 instructions)
1534 * --------------------------------------------------------
1535 * MADD [rd,] rs, rt Multiply/Add
1536 * MADDU [rd,] rs, rt Multiply/Add Unsigned
1537 * MULT [rd,] rs, rt Multiply (3-operand)
1538 * MULTU [rd,] rs, rt Multiply Unsigned (3-operand)
1540 * Multiply Instructions for Pipeline 1 (10 instructions)
1541 * ------------------------------------------------------
1542 * MULT1 [rd,] rs, rt Multiply Pipeline 1
1543 * MULTU1 [rd,] rs, rt Multiply Unsigned Pipeline 1
1544 * DIV1 rs, rt Divide Pipeline 1
1545 * DIVU1 rs, rt Divide Unsigned Pipeline 1
1546 * MADD1 [rd,] rs, rt Multiply-Add Pipeline 1
1547 * MADDU1 [rd,] rs, rt Multiply-Add Unsigned Pipeline 1
1548 * MFHI1 rd Move From HI1 Register
1549 * MFLO1 rd Move From LO1 Register
1550 * MTHI1 rs Move To HI1 Register
1551 * MTLO1 rs Move To LO1 Register
1553 * Arithmetic (19 instructions)
1554 * ----------------------------
1555 * PADDB rd, rs, rt Parallel Add Byte
1556 * PSUBB rd, rs, rt Parallel Subtract Byte
1557 * PADDH rd, rs, rt Parallel Add Halfword
1558 * PSUBH rd, rs, rt Parallel Subtract Halfword
1559 * PADDW rd, rs, rt Parallel Add Word
1560 * PSUBW rd, rs, rt Parallel Subtract Word
1561 * PADSBH rd, rs, rt Parallel Add/Subtract Halfword
1562 * PADDSB rd, rs, rt Parallel Add with Signed Saturation Byte
1563 * PSUBSB rd, rs, rt Parallel Subtract with Signed Saturation Byte
1564 * PADDSH rd, rs, rt Parallel Add with Signed Saturation Halfword
1565 * PSUBSH rd, rs, rt Parallel Subtract with Signed Saturation Halfword
1566 * PADDSW rd, rs, rt Parallel Add with Signed Saturation Word
1567 * PSUBSW rd, rs, rt Parallel Subtract with Signed Saturation Word
1568 * PADDUB rd, rs, rt Parallel Add with Unsigned saturation Byte
1569 * PSUBUB rd, rs, rt Parallel Subtract with Unsigned saturation Byte
1570 * PADDUH rd, rs, rt Parallel Add with Unsigned saturation Halfword
1571 * PSUBUH rd, rs, rt Parallel Subtract with Unsigned saturation Halfword
1572 * PADDUW rd, rs, rt Parallel Add with Unsigned saturation Word
1573 * PSUBUW rd, rs, rt Parallel Subtract with Unsigned saturation Word
1575 * Min/Max (4 instructions)
1576 * ------------------------
1577 * PMAXH rd, rs, rt Parallel Maximum Halfword
1578 * PMINH rd, rs, rt Parallel Minimum Halfword
1579 * PMAXW rd, rs, rt Parallel Maximum Word
1580 * PMINW rd, rs, rt Parallel Minimum Word
1582 * Absolute (2 instructions)
1583 * -------------------------
1584 * PABSH rd, rt Parallel Absolute Halfword
1585 * PABSW rd, rt Parallel Absolute Word
1587 * Logical (4 instructions)
1588 * ------------------------
1589 * PAND rd, rs, rt Parallel AND
1590 * POR rd, rs, rt Parallel OR
1591 * PXOR rd, rs, rt Parallel XOR
1592 * PNOR rd, rs, rt Parallel NOR
1594 * Shift (9 instructions)
1595 * ----------------------
1596 * PSLLH rd, rt, sa Parallel Shift Left Logical Halfword
1597 * PSRLH rd, rt, sa Parallel Shift Right Logical Halfword
1598 * PSRAH rd, rt, sa Parallel Shift Right Arithmetic Halfword
1599 * PSLLW rd, rt, sa Parallel Shift Left Logical Word
1600 * PSRLW rd, rt, sa Parallel Shift Right Logical Word
1601 * PSRAW rd, rt, sa Parallel Shift Right Arithmetic Word
1602 * PSLLVW rd, rt, rs Parallel Shift Left Logical Variable Word
1603 * PSRLVW rd, rt, rs Parallel Shift Right Logical Variable Word
1604 * PSRAVW rd, rt, rs Parallel Shift Right Arithmetic Variable Word
1606 * Compare (6 instructions)
1607 * ------------------------
1608 * PCGTB rd, rs, rt Parallel Compare for Greater Than Byte
1609 * PCEQB rd, rs, rt Parallel Compare for Equal Byte
1610 * PCGTH rd, rs, rt Parallel Compare for Greater Than Halfword
1611 * PCEQH rd, rs, rt Parallel Compare for Equal Halfword
1612 * PCGTW rd, rs, rt Parallel Compare for Greater Than Word
1613 * PCEQW rd, rs, rt Parallel Compare for Equal Word
1615 * LZC (1 instruction)
1616 * -------------------
1617 * PLZCW rd, rs Parallel Leading Zero or One Count Word
1619 * Quadword Load and Store (2 instructions)
1620 * ----------------------------------------
1621 * LQ rt, offset(base) Load Quadword
1622 * SQ rt, offset(base) Store Quadword
1624 * Multiply and Divide (19 instructions)
1625 * -------------------------------------
1626 * PMULTW rd, rs, rt Parallel Multiply Word
1627 * PMULTUW rd, rs, rt Parallel Multiply Unsigned Word
1628 * PDIVW rs, rt Parallel Divide Word
1629 * PDIVUW rs, rt Parallel Divide Unsigned Word
1630 * PMADDW rd, rs, rt Parallel Multiply-Add Word
1631 * PMADDUW rd, rs, rt Parallel Multiply-Add Unsigned Word
1632 * PMSUBW rd, rs, rt Parallel Multiply-Subtract Word
1633 * PMULTH rd, rs, rt Parallel Multiply Halfword
1634 * PMADDH rd, rs, rt Parallel Multiply-Add Halfword
1635 * PMSUBH rd, rs, rt Parallel Multiply-Subtract Halfword
1636 * PHMADH rd, rs, rt Parallel Horizontal Multiply-Add Halfword
1637 * PHMSBH rd, rs, rt Parallel Horizontal Multiply-Subtract Halfword
1638 * PDIVBW rs, rt Parallel Divide Broadcast Word
1639 * PMFHI rd Parallel Move From HI Register
1640 * PMFLO rd Parallel Move From LO Register
1641 * PMTHI rs Parallel Move To HI Register
1642 * PMTLO rs Parallel Move To LO Register
1643 * PMFHL rd Parallel Move From HI/LO Register
1644 * PMTHL rs Parallel Move To HI/LO Register
1646 * Pack/Extend (11 instructions)
1647 * -----------------------------
1648 * PPAC5 rd, rt Parallel Pack to 5 bits
1649 * PPACB rd, rs, rt Parallel Pack to Byte
1650 * PPACH rd, rs, rt Parallel Pack to Halfword
1651 * PPACW rd, rs, rt Parallel Pack to Word
1652 * PEXT5 rd, rt Parallel Extend Upper from 5 bits
1653 * PEXTUB rd, rs, rt Parallel Extend Upper from Byte
1654 * PEXTLB rd, rs, rt Parallel Extend Lower from Byte
1655 * PEXTUH rd, rs, rt Parallel Extend Upper from Halfword
1656 * PEXTLH rd, rs, rt Parallel Extend Lower from Halfword
1657 * PEXTUW rd, rs, rt Parallel Extend Upper from Word
1658 * PEXTLW rd, rs, rt Parallel Extend Lower from Word
1660 * Others (16 instructions)
1661 * ------------------------
1662 * PCPYH rd, rt Parallel Copy Halfword
1663 * PCPYLD rd, rs, rt Parallel Copy Lower Doubleword
1664 * PCPYUD rd, rs, rt Parallel Copy Upper Doubleword
1665 * PREVH rd, rt Parallel Reverse Halfword
1666 * PINTH rd, rs, rt Parallel Interleave Halfword
1667 * PINTEH rd, rs, rt Parallel Interleave Even Halfword
1668 * PEXEH rd, rt Parallel Exchange Even Halfword
1669 * PEXCH rd, rt Parallel Exchange Center Halfword
1670 * PEXEW rd, rt Parallel Exchange Even Word
1671 * PEXCW rd, rt Parallel Exchange Center Word
1672 * QFSRV rd, rs, rt Quadword Funnel Shift Right Variable
1673 * MFSA rd Move from Shift Amount Register
1674 * MTSA rs Move to Shift Amount Register
1675 * MTSAB rs, immediate Move Byte Count to Shift Amount Register
1676 * MTSAH rs, immediate Move Halfword Count to Shift Amount Register
1677 * PROT3W rd, rt Parallel Rotate 3 Words
1679 * MMI (MultiMedia Instruction) encodings
1680 * ======================================
1682 * MMI instructions encoding table keys:
1684 * * This code is reserved for future use. An attempt to execute it
1685 * causes a Reserved Instruction exception.
1686 * % This code indicates an instruction class. The instruction word
1687 * must be further decoded by examining additional tables that show
1688 * the values for other instruction fields.
1689 * # This code is reserved for the unsupported instructions DMULT,
1690 * DMULTU, DDIV, DDIVU, LL, LLD, SC, SCD, LWC2 and SWC2. An attempt
1691 * to execute it causes a Reserved Instruction exception.
1693 * MMI instructions encoded by opcode field (MMI, LQ, SQ):
1696 * +--------+----------------------------------------+
1698 * +--------+----------------------------------------+
1700 * opcode bits 28..26
1701 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
1702 * 31..29 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
1703 * -------+-------+-------+-------+-------+-------+-------+-------+-------
1704 * 0 000 |SPECIAL| REGIMM| J | JAL | BEQ | BNE | BLEZ | BGTZ
1705 * 1 001 | ADDI | ADDIU | SLTI | SLTIU | ANDI | ORI | XORI | LUI
1706 * 2 010 | COP0 | COP1 | * | * | BEQL | BNEL | BLEZL | BGTZL
1707 * 3 011 | DADDI | DADDIU| LDL | LDR | MMI% | * | LQ | SQ
1708 * 4 100 | LB | LH | LWL | LW | LBU | LHU | LWR | LWU
1709 * 5 101 | SB | SH | SWL | SW | SDL | SDR | SWR | CACHE
1710 * 6 110 | # | LWC1 | # | PREF | # | LDC1 | # | LD
1711 * 7 111 | # | SWC1 | # | * | # | SDC1 | # | SD
1715 MMI_OPC_CLASS_MMI
= 0x1C << 26, /* Same as OPC_SPECIAL2 */
1716 MMI_OPC_LQ
= 0x1E << 26, /* Same as OPC_MSA */
1717 MMI_OPC_SQ
= 0x1F << 26, /* Same as OPC_SPECIAL3 */
1721 * MMI instructions with opcode field = MMI:
1724 * +--------+-------------------------------+--------+
1725 * | MMI | |function|
1726 * +--------+-------------------------------+--------+
1728 * function bits 2..0
1729 * bits | 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7
1730 * 5..3 | 000 | 001 | 010 | 011 | 100 | 101 | 110 | 111
1731 * -------+-------+-------+-------+-------+-------+-------+-------+-------
1732 * 0 000 | MADD | MADDU | * | * | PLZCW | * | * | *
1733 * 1 001 | MMI0% | MMI2% | * | * | * | * | * | *
1734 * 2 010 | MFHI1 | MTHI1 | MFLO1 | MTLO1 | * | * | * | *
1735 * 3 011 | MULT1 | MULTU1| DIV1 | DIVU1 | * | * | * | *
1736 * 4 100 | MADD1 | MADDU1| * | * | * | * | * | *
1737 * 5 101 | MMI1% | MMI3% | * | * | * | * | * | *
1738 * 6 110 | PMFHL | PMTHL | * | * | PSLLH | * | PSRLH | PSRAH
1739 * 7 111 | * | * | * | * | PSLLW | * | PSRLW | PSRAW
/* Decode key for MMI-class ops: major opcode plus the 6-bit function field. */
#define MASK_MMI(op) (MASK_OP_MAJOR(op) | ((op) & 0x3F))
1744 MMI_OPC_MADD
= 0x00 | MMI_OPC_CLASS_MMI
, /* Same as OPC_MADD */
1745 MMI_OPC_MADDU
= 0x01 | MMI_OPC_CLASS_MMI
, /* Same as OPC_MADDU */
1746 MMI_OPC_PLZCW
= 0x04 | MMI_OPC_CLASS_MMI
,
1747 MMI_OPC_CLASS_MMI0
= 0x08 | MMI_OPC_CLASS_MMI
,
1748 MMI_OPC_CLASS_MMI2
= 0x09 | MMI_OPC_CLASS_MMI
,
1749 MMI_OPC_MFHI1
= 0x10 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MFHI */
1750 MMI_OPC_MTHI1
= 0x11 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MTHI */
1751 MMI_OPC_MFLO1
= 0x12 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MFLO */
1752 MMI_OPC_MTLO1
= 0x13 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MTLO */
1753 MMI_OPC_MULT1
= 0x18 | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_MULT */
1754 MMI_OPC_MULTU1
= 0x19 | MMI_OPC_CLASS_MMI
, /* Same min. as OPC_MULTU */
1755 MMI_OPC_DIV1
= 0x1A | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_DIV */
1756 MMI_OPC_DIVU1
= 0x1B | MMI_OPC_CLASS_MMI
, /* Same minor as OPC_DIVU */
1757 MMI_OPC_MADD1
= 0x20 | MMI_OPC_CLASS_MMI
,
1758 MMI_OPC_MADDU1
= 0x21 | MMI_OPC_CLASS_MMI
,
1759 MMI_OPC_CLASS_MMI1
= 0x28 | MMI_OPC_CLASS_MMI
,
1760 MMI_OPC_CLASS_MMI3
= 0x29 | MMI_OPC_CLASS_MMI
,
1761 MMI_OPC_PMFHL
= 0x30 | MMI_OPC_CLASS_MMI
,
1762 MMI_OPC_PMTHL
= 0x31 | MMI_OPC_CLASS_MMI
,
1763 MMI_OPC_PSLLH
= 0x34 | MMI_OPC_CLASS_MMI
,
1764 MMI_OPC_PSRLH
= 0x36 | MMI_OPC_CLASS_MMI
,
1765 MMI_OPC_PSRAH
= 0x37 | MMI_OPC_CLASS_MMI
,
1766 MMI_OPC_PSLLW
= 0x3C | MMI_OPC_CLASS_MMI
,
1767 MMI_OPC_PSRLW
= 0x3E | MMI_OPC_CLASS_MMI
,
1768 MMI_OPC_PSRAW
= 0x3F | MMI_OPC_CLASS_MMI
,
1772 * MMI instructions with opcode field = MMI and bits 5..0 = MMI0:
1775 * +--------+----------------------+--------+--------+
1776 * | MMI | |function| MMI0 |
1777 * +--------+----------------------+--------+--------+
1779 * function bits 7..6
1780 * bits | 0 | 1 | 2 | 3
1781 * 10..8 | 00 | 01 | 10 | 11
1782 * -------+-------+-------+-------+-------
1783 * 0 000 | PADDW | PSUBW | PCGTW | PMAXW
1784 * 1 001 | PADDH | PSUBH | PCGTH | PMAXH
1785 * 2 010 | PADDB | PSUBB | PCGTB | *
1786 * 3 011 | * | * | * | *
1787 * 4 100 | PADDSW| PSUBSW| PEXTLW| PPACW
1788 * 5 101 | PADDSH| PSUBSH| PEXTLH| PPACH
1789 * 6 110 | PADDSB| PSUBSB| PEXTLB| PPACB
1790 * 7 111 | * | * | PEXT5 | PPAC5
/* Decode key for MMI0 sub-class: major opcode plus bits 10..0 (subfunction + function). */
#define MASK_MMI0(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
1795 MMI_OPC_0_PADDW
= (0x00 << 6) | MMI_OPC_CLASS_MMI0
,
1796 MMI_OPC_0_PSUBW
= (0x01 << 6) | MMI_OPC_CLASS_MMI0
,
1797 MMI_OPC_0_PCGTW
= (0x02 << 6) | MMI_OPC_CLASS_MMI0
,
1798 MMI_OPC_0_PMAXW
= (0x03 << 6) | MMI_OPC_CLASS_MMI0
,
1799 MMI_OPC_0_PADDH
= (0x04 << 6) | MMI_OPC_CLASS_MMI0
,
1800 MMI_OPC_0_PSUBH
= (0x05 << 6) | MMI_OPC_CLASS_MMI0
,
1801 MMI_OPC_0_PCGTH
= (0x06 << 6) | MMI_OPC_CLASS_MMI0
,
1802 MMI_OPC_0_PMAXH
= (0x07 << 6) | MMI_OPC_CLASS_MMI0
,
1803 MMI_OPC_0_PADDB
= (0x08 << 6) | MMI_OPC_CLASS_MMI0
,
1804 MMI_OPC_0_PSUBB
= (0x09 << 6) | MMI_OPC_CLASS_MMI0
,
1805 MMI_OPC_0_PCGTB
= (0x0A << 6) | MMI_OPC_CLASS_MMI0
,
1806 MMI_OPC_0_PADDSW
= (0x10 << 6) | MMI_OPC_CLASS_MMI0
,
1807 MMI_OPC_0_PSUBSW
= (0x11 << 6) | MMI_OPC_CLASS_MMI0
,
1808 MMI_OPC_0_PEXTLW
= (0x12 << 6) | MMI_OPC_CLASS_MMI0
,
1809 MMI_OPC_0_PPACW
= (0x13 << 6) | MMI_OPC_CLASS_MMI0
,
1810 MMI_OPC_0_PADDSH
= (0x14 << 6) | MMI_OPC_CLASS_MMI0
,
1811 MMI_OPC_0_PSUBSH
= (0x15 << 6) | MMI_OPC_CLASS_MMI0
,
1812 MMI_OPC_0_PEXTLH
= (0x16 << 6) | MMI_OPC_CLASS_MMI0
,
1813 MMI_OPC_0_PPACH
= (0x17 << 6) | MMI_OPC_CLASS_MMI0
,
1814 MMI_OPC_0_PADDSB
= (0x18 << 6) | MMI_OPC_CLASS_MMI0
,
1815 MMI_OPC_0_PSUBSB
= (0x19 << 6) | MMI_OPC_CLASS_MMI0
,
1816 MMI_OPC_0_PEXTLB
= (0x1A << 6) | MMI_OPC_CLASS_MMI0
,
1817 MMI_OPC_0_PPACB
= (0x1B << 6) | MMI_OPC_CLASS_MMI0
,
1818 MMI_OPC_0_PEXT5
= (0x1E << 6) | MMI_OPC_CLASS_MMI0
,
1819 MMI_OPC_0_PPAC5
= (0x1F << 6) | MMI_OPC_CLASS_MMI0
,
1823 * MMI instructions with opcode field = MMI and bits 5..0 = MMI1:
1826 * +--------+----------------------+--------+--------+
1827 * | MMI | |function| MMI1 |
1828 * +--------+----------------------+--------+--------+
1830 * function bits 7..6
1831 * bits | 0 | 1 | 2 | 3
1832 * 10..8 | 00 | 01 | 10 | 11
1833 * -------+-------+-------+-------+-------
1834 * 0 000 | * | PABSW | PCEQW | PMINW
1835 * 1 001 | PADSBH| PABSH | PCEQH | PMINH
1836 * 2 010 | * | * | PCEQB | *
1837 * 3 011 | * | * | * | *
1838 * 4 100 | PADDUW| PSUBUW| PEXTUW| *
1839 * 5 101 | PADDUH| PSUBUH| PEXTUH| *
1840 * 6 110 | PADDUB| PSUBUB| PEXTUB| QFSRV
1841 * 7 111 | * | * | * | *
/* Decode key for MMI1 sub-class: major opcode plus bits 10..0 (subfunction + function). */
#define MASK_MMI1(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
1846 MMI_OPC_1_PABSW
= (0x01 << 6) | MMI_OPC_CLASS_MMI1
,
1847 MMI_OPC_1_PCEQW
= (0x02 << 6) | MMI_OPC_CLASS_MMI1
,
1848 MMI_OPC_1_PMINW
= (0x03 << 6) | MMI_OPC_CLASS_MMI1
,
1849 MMI_OPC_1_PADSBH
= (0x04 << 6) | MMI_OPC_CLASS_MMI1
,
1850 MMI_OPC_1_PABSH
= (0x05 << 6) | MMI_OPC_CLASS_MMI1
,
1851 MMI_OPC_1_PCEQH
= (0x06 << 6) | MMI_OPC_CLASS_MMI1
,
1852 MMI_OPC_1_PMINH
= (0x07 << 6) | MMI_OPC_CLASS_MMI1
,
1853 MMI_OPC_1_PCEQB
= (0x0A << 6) | MMI_OPC_CLASS_MMI1
,
1854 MMI_OPC_1_PADDUW
= (0x10 << 6) | MMI_OPC_CLASS_MMI1
,
1855 MMI_OPC_1_PSUBUW
= (0x11 << 6) | MMI_OPC_CLASS_MMI1
,
1856 MMI_OPC_1_PEXTUW
= (0x12 << 6) | MMI_OPC_CLASS_MMI1
,
1857 MMI_OPC_1_PADDUH
= (0x14 << 6) | MMI_OPC_CLASS_MMI1
,
1858 MMI_OPC_1_PSUBUH
= (0x15 << 6) | MMI_OPC_CLASS_MMI1
,
1859 MMI_OPC_1_PEXTUH
= (0x16 << 6) | MMI_OPC_CLASS_MMI1
,
1860 MMI_OPC_1_PADDUB
= (0x18 << 6) | MMI_OPC_CLASS_MMI1
,
1861 MMI_OPC_1_PSUBUB
= (0x19 << 6) | MMI_OPC_CLASS_MMI1
,
1862 MMI_OPC_1_PEXTUB
= (0x1A << 6) | MMI_OPC_CLASS_MMI1
,
1863 MMI_OPC_1_QFSRV
= (0x1B << 6) | MMI_OPC_CLASS_MMI1
,
1867 * MMI instructions with opcode field = MMI and bits 5..0 = MMI2:
1870 * +--------+----------------------+--------+--------+
1871 * | MMI | |function| MMI2 |
1872 * +--------+----------------------+--------+--------+
1874 * function bits 7..6
1875 * bits | 0 | 1 | 2 | 3
1876 * 10..8 | 00 | 01 | 10 | 11
1877 * -------+-------+-------+-------+-------
1878 * 0 000 | PMADDW| * | PSLLVW| PSRLVW
1879 * 1 001 | PMSUBW| * | * | *
1880 * 2 010 | PMFHI | PMFLO | PINTH | *
1881 * 3 011 | PMULTW| PDIVW | PCPYLD| *
1882 * 4 100 | PMADDH| PHMADH| PAND | PXOR
1883 * 5 101 | PMSUBH| PHMSBH| * | *
1884 * 6 110 | * | * | PEXEH | PREVH
1885 * 7 111 | PMULTH| PDIVBW| PEXEW | PROT3W
/* Decode key for MMI2 sub-class: major opcode plus bits 10..0 (subfunction + function). */
#define MASK_MMI2(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
1890 MMI_OPC_2_PMADDW
= (0x00 << 6) | MMI_OPC_CLASS_MMI2
,
1891 MMI_OPC_2_PSLLVW
= (0x02 << 6) | MMI_OPC_CLASS_MMI2
,
1892 MMI_OPC_2_PSRLVW
= (0x03 << 6) | MMI_OPC_CLASS_MMI2
,
1893 MMI_OPC_2_PMSUBW
= (0x04 << 6) | MMI_OPC_CLASS_MMI2
,
1894 MMI_OPC_2_PMFHI
= (0x08 << 6) | MMI_OPC_CLASS_MMI2
,
1895 MMI_OPC_2_PMFLO
= (0x09 << 6) | MMI_OPC_CLASS_MMI2
,
1896 MMI_OPC_2_PINTH
= (0x0A << 6) | MMI_OPC_CLASS_MMI2
,
1897 MMI_OPC_2_PMULTW
= (0x0C << 6) | MMI_OPC_CLASS_MMI2
,
1898 MMI_OPC_2_PDIVW
= (0x0D << 6) | MMI_OPC_CLASS_MMI2
,
1899 MMI_OPC_2_PCPYLD
= (0x0E << 6) | MMI_OPC_CLASS_MMI2
,
1900 MMI_OPC_2_PMADDH
= (0x10 << 6) | MMI_OPC_CLASS_MMI2
,
1901 MMI_OPC_2_PHMADH
= (0x11 << 6) | MMI_OPC_CLASS_MMI2
,
1902 MMI_OPC_2_PAND
= (0x12 << 6) | MMI_OPC_CLASS_MMI2
,
1903 MMI_OPC_2_PXOR
= (0x13 << 6) | MMI_OPC_CLASS_MMI2
,
1904 MMI_OPC_2_PMSUBH
= (0x14 << 6) | MMI_OPC_CLASS_MMI2
,
1905 MMI_OPC_2_PHMSBH
= (0x15 << 6) | MMI_OPC_CLASS_MMI2
,
1906 MMI_OPC_2_PEXEH
= (0x1A << 6) | MMI_OPC_CLASS_MMI2
,
1907 MMI_OPC_2_PREVH
= (0x1B << 6) | MMI_OPC_CLASS_MMI2
,
1908 MMI_OPC_2_PMULTH
= (0x1C << 6) | MMI_OPC_CLASS_MMI2
,
1909 MMI_OPC_2_PDIVBW
= (0x1D << 6) | MMI_OPC_CLASS_MMI2
,
1910 MMI_OPC_2_PEXEW
= (0x1E << 6) | MMI_OPC_CLASS_MMI2
,
1911 MMI_OPC_2_PROT3W
= (0x1F << 6) | MMI_OPC_CLASS_MMI2
,
1915 * MMI instructions with opcode field = MMI and bits 5..0 = MMI3:
1918 * +--------+----------------------+--------+--------+
1919 * | MMI | |function| MMI3 |
1920 * +--------+----------------------+--------+--------+
1922 * function bits 7..6
1923 * bits | 0 | 1 | 2 | 3
1924 * 10..8 | 00 | 01 | 10 | 11
1925 * -------+-------+-------+-------+-------
1926 * 0 000 |PMADDUW| * | * | PSRAVW
1927 * 1 001 | * | * | * | *
1928 * 2 010 | PMTHI | PMTLO | PINTEH| *
1929 * 3 011 |PMULTUW| PDIVUW| PCPYUD| *
1930 * 4 100 | * | * | POR | PNOR
1931 * 5 101 | * | * | * | *
1932 * 6 110 | * | * | PEXCH | PCPYH
1933 * 7 111 | * | * | PEXCW | *
/* Decode key for MMI3 sub-class: major opcode plus bits 10..0 (subfunction + function). */
#define MASK_MMI3(op) (MASK_OP_MAJOR(op) | ((op) & 0x7FF))
1938 MMI_OPC_3_PMADDUW
= (0x00 << 6) | MMI_OPC_CLASS_MMI3
,
1939 MMI_OPC_3_PSRAVW
= (0x03 << 6) | MMI_OPC_CLASS_MMI3
,
1940 MMI_OPC_3_PMTHI
= (0x08 << 6) | MMI_OPC_CLASS_MMI3
,
1941 MMI_OPC_3_PMTLO
= (0x09 << 6) | MMI_OPC_CLASS_MMI3
,
1942 MMI_OPC_3_PINTEH
= (0x0A << 6) | MMI_OPC_CLASS_MMI3
,
1943 MMI_OPC_3_PMULTUW
= (0x0C << 6) | MMI_OPC_CLASS_MMI3
,
1944 MMI_OPC_3_PDIVUW
= (0x0D << 6) | MMI_OPC_CLASS_MMI3
,
1945 MMI_OPC_3_PCPYUD
= (0x0E << 6) | MMI_OPC_CLASS_MMI3
,
1946 MMI_OPC_3_POR
= (0x12 << 6) | MMI_OPC_CLASS_MMI3
,
1947 MMI_OPC_3_PNOR
= (0x13 << 6) | MMI_OPC_CLASS_MMI3
,
1948 MMI_OPC_3_PEXCH
= (0x1A << 6) | MMI_OPC_CLASS_MMI3
,
1949 MMI_OPC_3_PCPYH
= (0x1B << 6) | MMI_OPC_CLASS_MMI3
,
1950 MMI_OPC_3_PEXCW
= (0x1E << 6) | MMI_OPC_CLASS_MMI3
,
/* global register indices */
TCGv cpu_gpr[32], cpu_PC;
/*
 * For CPUs using 128-bit GPR registers, we put the lower halves in cpu_gpr[]
 * and the upper halves in cpu_gpr_hi[].
 */
TCGv_i64 cpu_gpr_hi[32];
TCGv cpu_HI[MIPS_DSP_ACC], cpu_LO[MIPS_DSP_ACC];
static TCGv cpu_dspctrl, btarget;
TCGv bcond;                       /* NOTE(review): reconstructed declaration; verify against upstream */
static TCGv cpu_lladdr, cpu_llval;  /* LL/SC link address and loaded value */
static TCGv_i32 hflags;
TCGv_i32 fpu_fcr0, fpu_fcr31;     /* FPU control registers */
TCGv_i64 fpu_f64[32];             /* FP registers, always held as 64-bit */

#if !defined(TARGET_MIPS64)
/* MXU registers: XR1..XR15 (XR0 reads as zero and is not stored). */
static TCGv mxu_gpr[NUMBER_OF_MXU_REGISTERS - 1];
static TCGv mxu_CR;               /* NOTE(review): reconstructed; used by gen_load_mxu_cr() below */
#endif
1974 #include "exec/gen-icount.h"
/*
 * Helper-call wrappers: "0e"/"1e" = zero/one TCG result operand, "Ni" = the
 * last N arguments are C immediates, materialised as temporary TCGv_i32
 * constants for the call and freed immediately afterwards.
 */
#define gen_helper_0e0i(name, arg) do {                           \
    TCGv_i32 helper_tmp = tcg_const_i32(arg);                     \
    gen_helper_##name(cpu_env, helper_tmp);                       \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e1i(name, arg1, arg2) do {                    \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(cpu_env, arg1, helper_tmp);                 \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e0i(name, ret, arg1) do {                     \
    TCGv_i32 helper_tmp = tcg_const_i32(arg1);                    \
    gen_helper_##name(ret, cpu_env, helper_tmp);                  \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e1i(name, ret, arg1, arg2) do {               \
    TCGv_i32 helper_tmp = tcg_const_i32(arg2);                    \
    gen_helper_##name(ret, cpu_env, arg1, helper_tmp);            \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e2i(name, arg1, arg2, arg3) do {              \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(cpu_env, arg1, arg2, helper_tmp);           \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_1e2i(name, ret, arg1, arg2, arg3) do {         \
    TCGv_i32 helper_tmp = tcg_const_i32(arg3);                    \
    gen_helper_##name(ret, cpu_env, arg1, arg2, helper_tmp);      \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

#define gen_helper_0e3i(name, arg1, arg2, arg3, arg4) do {        \
    TCGv_i32 helper_tmp = tcg_const_i32(arg4);                    \
    gen_helper_##name(cpu_env, arg1, arg2, arg3, helper_tmp);     \
    tcg_temp_free_i32(helper_tmp);                                \
    } while (0)

/* Target-specific translator stop conditions (beyond generic DISAS_*). */
#define DISAS_STOP       DISAS_TARGET_0
#define DISAS_EXIT       DISAS_TARGET_1
/* Register names used for TCG global naming and disassembly logging. */
static const char * const regnames[] = {
    "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3",
    "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7",
    "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
    "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra",
};

static const char * const regnames_HI[] = {
    "HI0", "HI1", "HI2", "HI3",
};

static const char * const regnames_LO[] = {
    "LO0", "LO1", "LO2", "LO3",
};

static const char * const fregnames[] = {
    "f0",  "f1",  "f2",  "f3",  "f4",  "f5",  "f6",  "f7",
    "f8",  "f9",  "f10", "f11", "f12", "f13", "f14", "f15",
    "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
    "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31",
};

#if !defined(TARGET_MIPS64)
static const char * const mxuregnames[] = {
    "XR1",  "XR2",  "XR3",  "XR4",  "XR5",  "XR6",  "XR7",  "XR8",
    "XR9",  "XR10", "XR11", "XR12", "XR13", "XR14", "XR15", "MXU_CR",
};
#endif
/* General purpose registers moves. */
void gen_load_gpr(TCGv t, int reg)
{
    if (reg == 0) {
        /* $zero always reads as 0; cpu_gpr[0] is never consulted. */
        tcg_gen_movi_tl(t, 0);
    } else {
        tcg_gen_mov_tl(t, cpu_gpr[reg]);
    }
}
void gen_store_gpr(TCGv t, int reg)
{
    if (reg != 0) {
        /* Writes to $zero are silently discarded. */
        tcg_gen_mov_tl(cpu_gpr[reg], t);
    }
}
#if defined(TARGET_MIPS64)
/* Upper 64 bits of the 128-bit GPRs (see cpu_gpr_hi[] declaration). */
void gen_load_gpr_hi(TCGv_i64 t, int reg)
{
    if (reg == 0) {
        tcg_gen_movi_i64(t, 0);
    } else {
        tcg_gen_mov_i64(t, cpu_gpr_hi[reg]);
    }
}

void gen_store_gpr_hi(TCGv_i64 t, int reg)
{
    if (reg != 0) {
        tcg_gen_mov_i64(cpu_gpr_hi[reg], t);
    }
}
#endif /* TARGET_MIPS64 */
/* Moves to/from shadow registers. */
static inline void gen_load_srsgpr(int from, int to)
{
    TCGv t0 = tcg_temp_new();

    if (from == 0) {
        tcg_gen_movi_tl(t0, 0);
    } else {
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        /*
         * Compute byte offset of the previous shadow set:
         * ((SRSCtl >> PSS) & 0xf) * sizeof(one 32-register bank).
         */
        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        /* Load GPR 'from' out of the selected shadow bank. */
        tcg_gen_ld_tl(t0, addr, sizeof(target_ulong) * from);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
    }
    gen_store_gpr(t0, to);
    tcg_temp_free(t0);  /* NOTE(review): trailing free reconstructed (dropped by extraction) */
}
static inline void gen_store_srsgpr(int from, int to)
{
    /* NOTE(review): the 'to != 0' guard was reconstructed (dropped by extraction). */
    if (to != 0) {
        TCGv t0 = tcg_temp_new();
        TCGv_i32 t2 = tcg_temp_new_i32();
        TCGv_ptr addr = tcg_temp_new_ptr();

        gen_load_gpr(t0, from);
        /* Same shadow-set offset computation as gen_load_srsgpr(). */
        tcg_gen_ld_i32(t2, cpu_env, offsetof(CPUMIPSState, CP0_SRSCtl));
        tcg_gen_shri_i32(t2, t2, CP0SRSCtl_PSS);
        tcg_gen_andi_i32(t2, t2, 0xf);
        tcg_gen_muli_i32(t2, t2, sizeof(target_ulong) * 32);
        tcg_gen_ext_i32_ptr(addr, t2);
        tcg_gen_add_ptr(addr, cpu_env, addr);

        tcg_gen_st_tl(t0, addr, sizeof(target_ulong) * to);
        tcg_temp_free_ptr(addr);
        tcg_temp_free_i32(t2);
        tcg_temp_free(t0);
    }
}
2133 #if !defined(TARGET_MIPS64)
/* MXU General purpose registers moves. */
static inline void gen_load_mxu_gpr(TCGv t, unsigned int reg)
{
    if (reg == 0) {
        /* XR0 reads as zero. */
        tcg_gen_movi_tl(t, 0);
    } else if (reg <= 15) {
        /* mxu_gpr[] holds XR1..XR15, hence the -1 bias. */
        tcg_gen_mov_tl(t, mxu_gpr[reg - 1]);
    }
}

static inline void gen_store_mxu_gpr(TCGv t, unsigned int reg)
{
    /* Writes to XR0 and out-of-range indices are discarded. */
    if (reg > 0 && reg <= 15) {
        tcg_gen_mov_tl(mxu_gpr[reg - 1], t);
    }
}

/* MXU control register moves. */
static inline void gen_load_mxu_cr(TCGv t)
{
    tcg_gen_mov_tl(t, mxu_CR);
}

static inline void gen_store_mxu_cr(TCGv t)
{
    /* TODO: Add handling of RW rules for MXU_CR. */
    tcg_gen_mov_tl(mxu_CR, t);
}
/* Emit code that sets the architectural PC to a translation-time constant. */
static inline void gen_save_pc(target_ulong pc)
{
    tcg_gen_movi_tl(cpu_PC, pc);
}
/*
 * Synchronise lazily-tracked CPU state (PC, hflags, branch target) from the
 * DisasContext into the TCG globals before a helper call or exception.
 */
static inline void save_cpu_state(DisasContext *ctx, int do_save_pc)
{
    LOG_DISAS("hflags %08x saved %08x\n", ctx->hflags, ctx->saved_hflags);
    if (do_save_pc && ctx->base.pc_next != ctx->saved_pc) {
        gen_save_pc(ctx->base.pc_next);
        ctx->saved_pc = ctx->base.pc_next;
    }
    if (ctx->hflags != ctx->saved_hflags) {
        tcg_gen_movi_i32(hflags, ctx->hflags);
        ctx->saved_hflags = ctx->hflags;
        /* NOTE(review): case labels reconstructed from upstream; verify. */
        switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
        case MIPS_HFLAG_BR:
            break;
        case MIPS_HFLAG_BC:
        case MIPS_HFLAG_BL:
        case MIPS_HFLAG_B:
            /* btarget is only meaningful inside a branch delay slot. */
            tcg_gen_movi_tl(btarget, ctx->btarget);
            break;
        }
    }
}
/*
 * Re-seed the DisasContext's lazy state tracking from the CPU state at the
 * start of translation (inverse of save_cpu_state()).
 */
static inline void restore_cpu_state(CPUMIPSState *env, DisasContext *ctx)
{
    ctx->saved_hflags = ctx->hflags;
    /* NOTE(review): case labels reconstructed from upstream; verify. */
    switch (ctx->hflags & MIPS_HFLAG_BMASK_BASE) {
    case MIPS_HFLAG_BR:
        break;
    case MIPS_HFLAG_BC:
    case MIPS_HFLAG_BL:
    case MIPS_HFLAG_B:
        ctx->btarget = env->btarget;
        break;
    }
}
/*
 * Raise a guest exception with an error code; synchronises PC/hflags first
 * and marks the translation block as ended (DISAS_NORETURN).
 */
void generate_exception_err(DisasContext *ctx, int excp, int err)
{
    TCGv_i32 texcp = tcg_const_i32(excp);
    TCGv_i32 terr = tcg_const_i32(err);
    save_cpu_state(ctx, 1);
    gen_helper_raise_exception_err(cpu_env, texcp, terr);
    tcg_temp_free_i32(terr);
    tcg_temp_free_i32(texcp);
    ctx->base.is_jmp = DISAS_NORETURN;
}
/*
 * Raise a guest exception WITHOUT ending the translation block; unlike
 * generate_exception_err() this does not save CPU state first.
 */
void generate_exception(DisasContext *ctx, int excp)
{
    gen_helper_0e0i(raise_exception, excp);
}
/* Raise a guest exception (error code 0) and end the translation block. */
void generate_exception_end(DisasContext *ctx, int excp)
{
    generate_exception_err(ctx, excp, 0);
}
/* Convenience wrapper: raise a Reserved Instruction exception. */
void gen_reserved_instruction(DisasContext *ctx)
{
    generate_exception_end(ctx, EXCP_RI);
}
/* Floating point register moves. */
void gen_load_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        /* 32-bit FP access is not permitted in FRE mode. */
        generate_exception(ctx, EXCP_RI);
    }
    /* Single-precision value lives in the low 32 bits of the 64-bit FPR. */
    tcg_gen_extrl_i64_i32(t, fpu_f64[reg]);
}
void gen_store_fpr32(DisasContext *ctx, TCGv_i32 t, int reg)
{
    TCGv_i64 t64;
    if (ctx->hflags & MIPS_HFLAG_FRE) {
        /* 32-bit FP access is not permitted in FRE mode. */
        generate_exception(ctx, EXCP_RI);
    }
    t64 = tcg_temp_new_i64();
    tcg_gen_extu_i32_i64(t64, t);
    /* Replace only the low 32 bits; the high half is preserved. */
    tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 0, 32);
    tcg_temp_free_i64(t64);
}
/* Load the high 32 bits of an FPR (or the odd sibling in 32-bit FPU mode). */
static void gen_load_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_extrh_i64_i32(t, fpu_f64[reg]);
    } else {
        /* 32-bit FPU: the "high half" is the odd register of the pair. */
        gen_load_fpr32(ctx, t, reg | 1);
    }
}
/* Store to the high 32 bits of an FPR (or the odd sibling in 32-bit FPU mode). */
static void gen_store_fpr32h(DisasContext *ctx, TCGv_i32 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        TCGv_i64 t64 = tcg_temp_new_i64();
        tcg_gen_extu_i32_i64(t64, t);
        /* Replace only bits 63..32; the low half is preserved. */
        tcg_gen_deposit_i64(fpu_f64[reg], fpu_f64[reg], t64, 32, 32);
        tcg_temp_free_i64(t64);
    } else {
        gen_store_fpr32(ctx, t, reg | 1);
    }
}
void gen_load_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(t, fpu_f64[reg]);
    } else {
        /*
         * 32-bit FPU mode: a 64-bit value is an even/odd register pair —
         * even register = low word, odd register = high word.
         */
        tcg_gen_concat32_i64(t, fpu_f64[reg & ~1], fpu_f64[reg | 1]);
    }
}
void gen_store_fpr64(DisasContext *ctx, TCGv_i64 t, int reg)
{
    if (ctx->hflags & MIPS_HFLAG_F64) {
        tcg_gen_mov_i64(fpu_f64[reg], t);
    } else {
        TCGv_i64 t0;
        /* 32-bit FPU mode: split across the even/odd register pair. */
        tcg_gen_deposit_i64(fpu_f64[reg & ~1], fpu_f64[reg & ~1], t, 0, 32);
        t0 = tcg_temp_new_i64();
        tcg_gen_shri_i64(t0, t, 32);
        tcg_gen_deposit_i64(fpu_f64[reg | 1], fpu_f64[reg | 1], t0, 0, 32);
        tcg_temp_free_i64(t0);
    }
}
2298 int get_fp_bit(int cc
)
/* Addresses computation */
void gen_op_addr_add(DisasContext *ctx, TCGv ret, TCGv arg0, TCGv arg1)
{
    tcg_gen_add_tl(ret, arg0, arg1);

#if defined(TARGET_MIPS64)
    /* In 32-bit address-wrap mode, addresses are sign-extended from bit 31. */
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        tcg_gen_ext32s_i64(ret, ret);
    }
#endif
}
/* Add an immediate offset to a base address, honouring 32-bit wrap mode. */
static inline void gen_op_addr_addi(DisasContext *ctx, TCGv ret, TCGv base,
                                    target_long ofs)
{
    tcg_gen_addi_tl(ret, base, ofs);

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        tcg_gen_ext32s_i64(ret, ret);
    }
#endif
}
/* Addresses computation (translation time) */
static target_long addr_add(DisasContext *ctx, target_long base,
                            target_long offset)
{
    target_long sum = base + offset;

#if defined(TARGET_MIPS64)
    if (ctx->hflags & MIPS_HFLAG_AWRAP) {
        /* NOTE(review): truncation reconstructed (dropped by extraction). */
        sum = (int32_t)sum;
    }
#endif
    return sum;
}
/* Sign-extract the low 32-bits to a target_long. */
void gen_move_low32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_ext32s_i64(ret, arg);
#else
    tcg_gen_extrl_i64_i32(ret, arg);
#endif
}
/* Sign-extract the high 32-bits to a target_long. */
void gen_move_high32(TCGv ret, TCGv_i64 arg)
{
#if defined(TARGET_MIPS64)
    tcg_gen_sari_i64(ret, arg, 32);
#else
    tcg_gen_extrh_i64_i32(ret, arg);
#endif
}
/* Raise a Coprocessor Unusable exception if CP0 access is not enabled. */
void check_cp0_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0))) {
        generate_exception_end(ctx, EXCP_CpU);
    }
}
/* Raise Coprocessor Unusable (coprocessor 1) if the FPU is not enabled. */
void check_cp1_enabled(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_FPU))) {
        generate_exception_err(ctx, EXCP_CpU, 1);
    }
}
/*
 * Verify that the processor is running with COP1X instructions enabled.
 * This is associated with the nabla symbol in the MIPS32 and MIPS64
 * opcode tables.
 */
void check_cop1x(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_COP1X))) {
        gen_reserved_instruction(ctx);
    }
}
/*
 * Verify that the processor is running with 64-bit floating-point
 * operations enabled.
 */
void check_cp1_64bitmode(DisasContext *ctx)
{
    /* ~hflags & mask is nonzero when either F64 or COP1X is clear. */
    if (unlikely(~ctx->hflags & (MIPS_HFLAG_F64 | MIPS_HFLAG_COP1X))) {
        gen_reserved_instruction(ctx);
    }
}
/*
 * Verify if floating point register is valid; an operation is not defined
 * if bit 0 of any register specification is set and the FR bit in the
 * Status register equals zero, since the register numbers specify an
 * even-odd pair of adjacent coprocessor general registers. When the FR bit
 * in the Status register equals one, both even and odd register numbers
 * are valid. This limitation exists only for 64 bit wide (d,l,ps) registers.
 *
 * Multiple 64 bit wide registers can be checked by calling
 * gen_op_cp1_registers(freg1 | freg2 | ... | fregN);
 */
void check_cp1_registers(DisasContext *ctx, int regs)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_F64) && (regs & 1))) {
        gen_reserved_instruction(ctx);
    }
}
/*
 * Verify that the processor is running with DSP instructions enabled.
 * This is enabled by CP0 Status register MX(24) bit.
 */
static inline void check_dsp(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP))) {
        if (ctx->insn_flags & ASE_DSP) {
            /* CPU has the ASE but it is disabled: DSP Disabled exception. */
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            /* CPU lacks the ASE entirely: Reserved Instruction. */
            gen_reserved_instruction(ctx);
        }
    }
}
/* As check_dsp(), but for DSP Revision 2 instructions. */
static inline void check_dsp_r2(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP_R2))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            gen_reserved_instruction(ctx);
        }
    }
}
/* As check_dsp(), but for DSP Revision 3 instructions. */
static inline void check_dsp_r3(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_DSP_R3))) {
        if (ctx->insn_flags & ASE_DSP) {
            generate_exception_end(ctx, EXCP_DSPDIS);
        } else {
            gen_reserved_instruction(ctx);
        }
    }
}
2458 * This code generates a "reserved instruction" exception if the
2459 * CPU does not support the instruction set corresponding to flags.
2461 void check_insn(DisasContext
*ctx
, uint64_t flags
)
2463 if (unlikely(!(ctx
->insn_flags
& flags
))) {
2464 gen_reserved_instruction(ctx
);
2469 * This code generates a "reserved instruction" exception if the
2470 * CPU has corresponding flag set which indicates that the instruction
2473 static inline void check_insn_opc_removed(DisasContext
*ctx
, uint64_t flags
)
2475 if (unlikely(ctx
->insn_flags
& flags
)) {
2476 gen_reserved_instruction(ctx
);
/*
 * The Linux kernel traps certain reserved instruction exceptions to
 * emulate the corresponding instructions. QEMU is the kernel in user
 * mode, so those traps are emulated by accepting the instructions.
 *
 * A reserved instruction exception is generated for flagged CPUs if
 * QEMU runs in system mode.
 */
static inline void check_insn_opc_user_only(DisasContext *ctx, uint64_t flags)
{
#ifndef CONFIG_USER_ONLY
    check_insn_opc_removed(ctx, flags);
#endif
}
/*
 * This code generates a "reserved instruction" exception if the
 * CPU does not support 64-bit paired-single (PS) floating point data type.
 */
static inline void check_ps(DisasContext *ctx)
{
    if (unlikely(!ctx->ps)) {
        generate_exception(ctx, EXCP_RI);
    }
    /* PS also requires full 64-bit FP mode. */
    check_cp1_64bitmode(ctx);
}
2508 * This code generates a "reserved instruction" exception if cpu is not
2509 * 64-bit or 64-bit instructions are not enabled.
2511 void check_mips_64(DisasContext
*ctx
)
2513 if (unlikely((TARGET_LONG_BITS
!= 64) || !(ctx
->hflags
& MIPS_HFLAG_64
))) {
2514 gen_reserved_instruction(ctx
);
2518 #ifndef CONFIG_USER_ONLY
/* Raise Reserved Instruction if MTHC0/MFHC0 (MVH) support is absent. */
static inline void check_mvh(DisasContext *ctx)
{
    if (unlikely(!ctx->mvh)) {
        generate_exception(ctx, EXCP_RI);
    }
}
/*
 * This code generates a "reserved instruction" exception if the
 * Config5 XNP bit is set.
 */
static inline void check_xnp(DisasContext *ctx)
{
    if (unlikely(ctx->CP0_Config5 & (1 << CP0C5_XNP))) {
        gen_reserved_instruction(ctx);
    }
}
2538 #ifndef CONFIG_USER_ONLY
/*
 * This code generates a "reserved instruction" exception if the
 * Config3 PW bit is NOT set.
 */
static inline void check_pw(DisasContext *ctx)
{
    if (unlikely(!(ctx->CP0_Config3 & (1 << CP0C3_PW)))) {
        gen_reserved_instruction(ctx);
    }
}
/*
 * This code generates a "reserved instruction" exception if the
 * Config3 MT bit is NOT set.
 */
static inline void check_mt(DisasContext *ctx)
{
    if (unlikely(!(ctx->CP0_Config3 & (1 << CP0C3_MT)))) {
        gen_reserved_instruction(ctx);
    }
}
2562 #ifndef CONFIG_USER_ONLY
/*
 * This code generates a "coprocessor unusable" exception if CP0 is not
 * available, and, if that is not the case, generates a "reserved instruction"
 * exception if the Config3 MT bit is NOT set. This is needed for availability
 * control of some of MT ASE instructions.
 */
static inline void check_cp0_mt(DisasContext *ctx)
{
    if (unlikely(!(ctx->hflags & MIPS_HFLAG_CP0))) {
        generate_exception_end(ctx, EXCP_CpU);
    } else {
        if (unlikely(!(ctx->CP0_Config3 & (1 << CP0C3_MT)))) {
            gen_reserved_instruction(ctx);
        }
    }
}
/*
 * This code generates a "reserved instruction" exception if the
 * Config5 NMS bit is set.
 */
static inline void check_nms(DisasContext *ctx)
{
    if (unlikely(ctx->CP0_Config5 & (1 << CP0C5_NMS))) {
        gen_reserved_instruction(ctx);
    }
}
/*
 * This code generates a "reserved instruction" exception if the
 * Config5 NMS bit is set, and Config1 DL, Config1 IL, Config2 SL,
 * Config2 TL, and Config5 L2C are unset.
 */
static inline void check_nms_dl_il_sl_tl_l2c(DisasContext *ctx)
{
    if (unlikely((ctx->CP0_Config5 & (1 << CP0C5_NMS)) &&
                 !(ctx->CP0_Config1 & (1 << CP0C1_DL)) &&
                 !(ctx->CP0_Config1 & (1 << CP0C1_IL)) &&
                 !(ctx->CP0_Config2 & (1 << CP0C2_SL)) &&
                 !(ctx->CP0_Config2 & (1 << CP0C2_TL)) &&
                 !(ctx->CP0_Config5 & (1 << CP0C5_L2C)))) {
        gen_reserved_instruction(ctx);
    }
}
/*
 * This code generates a "reserved instruction" exception if the
 * Config5 EVA bit is NOT set.
 */
static inline void check_eva(DisasContext *ctx)
{
    if (unlikely(!(ctx->CP0_Config5 & (1 << CP0C5_EVA)))) {
        gen_reserved_instruction(ctx);
    }
}
2622 * Define small wrappers for gen_load_fpr* so that we have a uniform
2623 * calling interface for 32 and 64-bit FPRs. No sense in changing
2624 * all callers for gen_load_fpr32 when we need the CTX parameter for
2627 #define gen_ldcmp_fpr32(ctx, x, y) gen_load_fpr32(ctx, x, y)
2628 #define gen_ldcmp_fpr64(ctx, x, y) gen_load_fpr64(ctx, x, y)
2629 #define FOP_CONDS(type, abs, fmt, ifmt, bits) \
2630 static inline void gen_cmp ## type ## _ ## fmt(DisasContext *ctx, int n, \
2631 int ft, int fs, int cc) \
2633 TCGv_i##bits fp0 = tcg_temp_new_i##bits(); \
2634 TCGv_i##bits fp1 = tcg_temp_new_i##bits(); \
2643 check_cp1_registers(ctx, fs | ft); \
2651 gen_ldcmp_fpr##bits(ctx, fp0, fs); \
2652 gen_ldcmp_fpr##bits(ctx, fp1, ft); \
2655 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _f, fp0, fp1, cc); \
2658 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _un, fp0, fp1, cc); \
2661 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _eq, fp0, fp1, cc); \
2664 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ueq, fp0, fp1, cc); \
2667 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _olt, fp0, fp1, cc); \
2670 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ult, fp0, fp1, cc); \
2673 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ole, fp0, fp1, cc); \
2676 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ule, fp0, fp1, cc); \
2679 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _sf, fp0, fp1, cc); \
2682 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngle, fp0, fp1, cc); \
2685 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _seq, fp0, fp1, cc); \
2688 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngl, fp0, fp1, cc); \
2691 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _lt, fp0, fp1, cc); \
2694 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _nge, fp0, fp1, cc); \
2697 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _le, fp0, fp1, cc); \
2700 gen_helper_0e2i(cmp ## type ## _ ## fmt ## _ngt, fp0, fp1, cc); \
2705 tcg_temp_free_i##bits(fp0); \
2706 tcg_temp_free_i##bits(fp1); \
2709 FOP_CONDS(, 0, d
, FMT_D
, 64)
2710 FOP_CONDS(abs
, 1, d
, FMT_D
, 64)
2711 FOP_CONDS(, 0, s
, FMT_S
, 32)
2712 FOP_CONDS(abs
, 1, s
, FMT_S
, 32)
2713 FOP_CONDS(, 0, ps
, FMT_PS
, 64)
2714 FOP_CONDS(abs
, 1, ps
, FMT_PS
, 64)
2717 #define FOP_CONDNS(fmt, ifmt, bits, STORE) \
2718 static inline void gen_r6_cmp_ ## fmt(DisasContext *ctx, int n, \
2719 int ft, int fs, int fd) \
2721 TCGv_i ## bits fp0 = tcg_temp_new_i ## bits(); \
2722 TCGv_i ## bits fp1 = tcg_temp_new_i ## bits(); \
2723 if (ifmt == FMT_D) { \
2724 check_cp1_registers(ctx, fs | ft | fd); \
2726 gen_ldcmp_fpr ## bits(ctx, fp0, fs); \
2727 gen_ldcmp_fpr ## bits(ctx, fp1, ft); \
2730 gen_helper_r6_cmp_ ## fmt ## _af(fp0, cpu_env, fp0, fp1); \
2733 gen_helper_r6_cmp_ ## fmt ## _un(fp0, cpu_env, fp0, fp1); \
2736 gen_helper_r6_cmp_ ## fmt ## _eq(fp0, cpu_env, fp0, fp1); \
2739 gen_helper_r6_cmp_ ## fmt ## _ueq(fp0, cpu_env, fp0, fp1); \
2742 gen_helper_r6_cmp_ ## fmt ## _lt(fp0, cpu_env, fp0, fp1); \
2745 gen_helper_r6_cmp_ ## fmt ## _ult(fp0, cpu_env, fp0, fp1); \
2748 gen_helper_r6_cmp_ ## fmt ## _le(fp0, cpu_env, fp0, fp1); \
2751 gen_helper_r6_cmp_ ## fmt ## _ule(fp0, cpu_env, fp0, fp1); \
2754 gen_helper_r6_cmp_ ## fmt ## _saf(fp0, cpu_env, fp0, fp1); \
2757 gen_helper_r6_cmp_ ## fmt ## _sun(fp0, cpu_env, fp0, fp1); \
2760 gen_helper_r6_cmp_ ## fmt ## _seq(fp0, cpu_env, fp0, fp1); \
2763 gen_helper_r6_cmp_ ## fmt ## _sueq(fp0, cpu_env, fp0, fp1); \
2766 gen_helper_r6_cmp_ ## fmt ## _slt(fp0, cpu_env, fp0, fp1); \
2769 gen_helper_r6_cmp_ ## fmt ## _sult(fp0, cpu_env, fp0, fp1); \
2772 gen_helper_r6_cmp_ ## fmt ## _sle(fp0, cpu_env, fp0, fp1); \
2775 gen_helper_r6_cmp_ ## fmt ## _sule(fp0, cpu_env, fp0, fp1); \
2778 gen_helper_r6_cmp_ ## fmt ## _or(fp0, cpu_env, fp0, fp1); \
2781 gen_helper_r6_cmp_ ## fmt ## _une(fp0, cpu_env, fp0, fp1); \
2784 gen_helper_r6_cmp_ ## fmt ## _ne(fp0, cpu_env, fp0, fp1); \
2787 gen_helper_r6_cmp_ ## fmt ## _sor(fp0, cpu_env, fp0, fp1); \
2790 gen_helper_r6_cmp_ ## fmt ## _sune(fp0, cpu_env, fp0, fp1); \
2793 gen_helper_r6_cmp_ ## fmt ## _sne(fp0, cpu_env, fp0, fp1); \
2799 tcg_temp_free_i ## bits(fp0); \
2800 tcg_temp_free_i ## bits(fp1); \
2803 FOP_CONDNS(d
, FMT_D
, 64, gen_store_fpr64(ctx
, fp0
, fd
))
2804 FOP_CONDNS(s
, FMT_S
, 32, gen_store_fpr32(ctx
, fp0
, fd
))
2806 #undef gen_ldcmp_fpr32
2807 #undef gen_ldcmp_fpr64
/* load/store instructions. */
#ifdef CONFIG_USER_ONLY
/*
 * User mode: emulate LL by recording the link address/value directly in
 * env->lladdr / env->llval at translation-generated-code time.
 */
#define OP_LD_ATOMIC(insn, fname)                                          \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx,          \
                                DisasContext *ctx)                         \
{                                                                          \
    TCGv t0 = tcg_temp_new();                                              \
    tcg_gen_mov_tl(t0, arg1);                                              \
    tcg_gen_qemu_##fname(ret, arg1, ctx->mem_idx);                         \
    tcg_gen_st_tl(t0, cpu_env, offsetof(CPUMIPSState, lladdr));            \
    tcg_gen_st_tl(ret, cpu_env, offsetof(CPUMIPSState, llval));            \
    tcg_temp_free(t0);                                                     \
}
#else
/* System mode: the helper performs the linked load. */
#define OP_LD_ATOMIC(insn, fname)                                          \
static inline void op_ld_##insn(TCGv ret, TCGv arg1, int mem_idx,          \
                                DisasContext *ctx)                         \
{                                                                          \
    gen_helper_1e1i(insn, ret, arg1, mem_idx);                             \
}
#endif
OP_LD_ATOMIC(ll, ld32s);
#if defined(TARGET_MIPS64)
OP_LD_ATOMIC(lld, ld64);
#endif
#undef OP_LD_ATOMIC
/* NOTE(review): #else/#endif/#undef reconstructed (dropped by extraction). */
2836 void gen_base_offset_addr(DisasContext
*ctx
, TCGv addr
, int base
, int offset
)
2839 tcg_gen_movi_tl(addr
, offset
);
2840 } else if (offset
== 0) {
2841 gen_load_gpr(addr
, base
);
2843 tcg_gen_movi_tl(addr
, offset
);
2844 gen_op_addr_add(ctx
, addr
, cpu_gpr
[base
], addr
);
2848 static target_ulong
pc_relative_pc(DisasContext
*ctx
)
2850 target_ulong pc
= ctx
->base
.pc_next
;
2852 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
2853 int branch_bytes
= ctx
->hflags
& MIPS_HFLAG_BDS16
? 2 : 4;
2858 pc
&= ~(target_ulong
)3;
2863 static void gen_ld(DisasContext
*ctx
, uint32_t opc
,
2864 int rt
, int base
, int offset
)
2867 int mem_idx
= ctx
->mem_idx
;
2869 if (rt
== 0 && ctx
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
|
2872 * Loongson CPU uses a load to zero register for prefetch.
2873 * We emulate it as a NOP. On other CPU we must perform the
2874 * actual memory access.
2879 t0
= tcg_temp_new();
2880 gen_base_offset_addr(ctx
, t0
, base
, offset
);
2883 #if defined(TARGET_MIPS64)
2885 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
|
2886 ctx
->default_tcg_memop_mask
);
2887 gen_store_gpr(t0
, rt
);
2890 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
|
2891 ctx
->default_tcg_memop_mask
);
2892 gen_store_gpr(t0
, rt
);
2896 op_ld_lld(t0
, t0
, mem_idx
, ctx
);
2897 gen_store_gpr(t0
, rt
);
2900 t1
= tcg_temp_new();
2902 * Do a byte access to possibly trigger a page
2903 * fault with the unaligned address.
2905 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2906 tcg_gen_andi_tl(t1
, t0
, 7);
2907 #ifndef TARGET_WORDS_BIGENDIAN
2908 tcg_gen_xori_tl(t1
, t1
, 7);
2910 tcg_gen_shli_tl(t1
, t1
, 3);
2911 tcg_gen_andi_tl(t0
, t0
, ~7);
2912 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2913 tcg_gen_shl_tl(t0
, t0
, t1
);
2914 t2
= tcg_const_tl(-1);
2915 tcg_gen_shl_tl(t2
, t2
, t1
);
2916 gen_load_gpr(t1
, rt
);
2917 tcg_gen_andc_tl(t1
, t1
, t2
);
2919 tcg_gen_or_tl(t0
, t0
, t1
);
2921 gen_store_gpr(t0
, rt
);
2924 t1
= tcg_temp_new();
2926 * Do a byte access to possibly trigger a page
2927 * fault with the unaligned address.
2929 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
2930 tcg_gen_andi_tl(t1
, t0
, 7);
2931 #ifdef TARGET_WORDS_BIGENDIAN
2932 tcg_gen_xori_tl(t1
, t1
, 7);
2934 tcg_gen_shli_tl(t1
, t1
, 3);
2935 tcg_gen_andi_tl(t0
, t0
, ~7);
2936 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2937 tcg_gen_shr_tl(t0
, t0
, t1
);
2938 tcg_gen_xori_tl(t1
, t1
, 63);
2939 t2
= tcg_const_tl(0xfffffffffffffffeull
);
2940 tcg_gen_shl_tl(t2
, t2
, t1
);
2941 gen_load_gpr(t1
, rt
);
2942 tcg_gen_and_tl(t1
, t1
, t2
);
2944 tcg_gen_or_tl(t0
, t0
, t1
);
2946 gen_store_gpr(t0
, rt
);
2949 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2950 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2952 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEQ
);
2953 gen_store_gpr(t0
, rt
);
2957 t1
= tcg_const_tl(pc_relative_pc(ctx
));
2958 gen_op_addr_add(ctx
, t0
, t0
, t1
);
2960 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
);
2961 gen_store_gpr(t0
, rt
);
2964 mem_idx
= MIPS_HFLAG_UM
;
2967 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESL
|
2968 ctx
->default_tcg_memop_mask
);
2969 gen_store_gpr(t0
, rt
);
2972 mem_idx
= MIPS_HFLAG_UM
;
2975 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TESW
|
2976 ctx
->default_tcg_memop_mask
);
2977 gen_store_gpr(t0
, rt
);
2980 mem_idx
= MIPS_HFLAG_UM
;
2983 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUW
|
2984 ctx
->default_tcg_memop_mask
);
2985 gen_store_gpr(t0
, rt
);
2988 mem_idx
= MIPS_HFLAG_UM
;
2991 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_SB
);
2992 gen_store_gpr(t0
, rt
);
2995 mem_idx
= MIPS_HFLAG_UM
;
2998 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_UB
);
2999 gen_store_gpr(t0
, rt
);
3002 mem_idx
= MIPS_HFLAG_UM
;
3005 t1
= tcg_temp_new();
3007 * Do a byte access to possibly trigger a page
3008 * fault with the unaligned address.
3010 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3011 tcg_gen_andi_tl(t1
, t0
, 3);
3012 #ifndef TARGET_WORDS_BIGENDIAN
3013 tcg_gen_xori_tl(t1
, t1
, 3);
3015 tcg_gen_shli_tl(t1
, t1
, 3);
3016 tcg_gen_andi_tl(t0
, t0
, ~3);
3017 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3018 tcg_gen_shl_tl(t0
, t0
, t1
);
3019 t2
= tcg_const_tl(-1);
3020 tcg_gen_shl_tl(t2
, t2
, t1
);
3021 gen_load_gpr(t1
, rt
);
3022 tcg_gen_andc_tl(t1
, t1
, t2
);
3024 tcg_gen_or_tl(t0
, t0
, t1
);
3026 tcg_gen_ext32s_tl(t0
, t0
);
3027 gen_store_gpr(t0
, rt
);
3030 mem_idx
= MIPS_HFLAG_UM
;
3033 t1
= tcg_temp_new();
3035 * Do a byte access to possibly trigger a page
3036 * fault with the unaligned address.
3038 tcg_gen_qemu_ld_tl(t1
, t0
, mem_idx
, MO_UB
);
3039 tcg_gen_andi_tl(t1
, t0
, 3);
3040 #ifdef TARGET_WORDS_BIGENDIAN
3041 tcg_gen_xori_tl(t1
, t1
, 3);
3043 tcg_gen_shli_tl(t1
, t1
, 3);
3044 tcg_gen_andi_tl(t0
, t0
, ~3);
3045 tcg_gen_qemu_ld_tl(t0
, t0
, mem_idx
, MO_TEUL
);
3046 tcg_gen_shr_tl(t0
, t0
, t1
);
3047 tcg_gen_xori_tl(t1
, t1
, 31);
3048 t2
= tcg_const_tl(0xfffffffeull
);
3049 tcg_gen_shl_tl(t2
, t2
, t1
);
3050 gen_load_gpr(t1
, rt
);
3051 tcg_gen_and_tl(t1
, t1
, t2
);
3053 tcg_gen_or_tl(t0
, t0
, t1
);
3055 tcg_gen_ext32s_tl(t0
, t0
);
3056 gen_store_gpr(t0
, rt
);
3059 mem_idx
= MIPS_HFLAG_UM
;
3063 op_ld_ll(t0
, t0
, mem_idx
, ctx
);
3064 gen_store_gpr(t0
, rt
);
3070 static void gen_llwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3071 uint32_t reg1
, uint32_t reg2
)
3073 TCGv taddr
= tcg_temp_new();
3074 TCGv_i64 tval
= tcg_temp_new_i64();
3075 TCGv tmp1
= tcg_temp_new();
3076 TCGv tmp2
= tcg_temp_new();
3078 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3079 tcg_gen_qemu_ld64(tval
, taddr
, ctx
->mem_idx
);
3080 #ifdef TARGET_WORDS_BIGENDIAN
3081 tcg_gen_extr_i64_tl(tmp2
, tmp1
, tval
);
3083 tcg_gen_extr_i64_tl(tmp1
, tmp2
, tval
);
3085 gen_store_gpr(tmp1
, reg1
);
3086 tcg_temp_free(tmp1
);
3087 gen_store_gpr(tmp2
, reg2
);
3088 tcg_temp_free(tmp2
);
3089 tcg_gen_st_i64(tval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3090 tcg_temp_free_i64(tval
);
3091 tcg_gen_st_tl(taddr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3092 tcg_temp_free(taddr
);
3096 static void gen_st(DisasContext
*ctx
, uint32_t opc
, int rt
,
3097 int base
, int offset
)
3099 TCGv t0
= tcg_temp_new();
3100 TCGv t1
= tcg_temp_new();
3101 int mem_idx
= ctx
->mem_idx
;
3103 gen_base_offset_addr(ctx
, t0
, base
, offset
);
3104 gen_load_gpr(t1
, rt
);
3106 #if defined(TARGET_MIPS64)
3108 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEQ
|
3109 ctx
->default_tcg_memop_mask
);
3112 gen_helper_0e2i(sdl
, t1
, t0
, mem_idx
);
3115 gen_helper_0e2i(sdr
, t1
, t0
, mem_idx
);
3119 mem_idx
= MIPS_HFLAG_UM
;
3122 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUL
|
3123 ctx
->default_tcg_memop_mask
);
3126 mem_idx
= MIPS_HFLAG_UM
;
3129 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_TEUW
|
3130 ctx
->default_tcg_memop_mask
);
3133 mem_idx
= MIPS_HFLAG_UM
;
3136 tcg_gen_qemu_st_tl(t1
, t0
, mem_idx
, MO_8
);
3139 mem_idx
= MIPS_HFLAG_UM
;
3142 gen_helper_0e2i(swl
, t1
, t0
, mem_idx
);
3145 mem_idx
= MIPS_HFLAG_UM
;
3148 gen_helper_0e2i(swr
, t1
, t0
, mem_idx
);
3156 /* Store conditional */
3157 static void gen_st_cond(DisasContext
*ctx
, int rt
, int base
, int offset
,
3158 MemOp tcg_mo
, bool eva
)
3161 TCGLabel
*l1
= gen_new_label();
3162 TCGLabel
*done
= gen_new_label();
3164 t0
= tcg_temp_new();
3165 addr
= tcg_temp_new();
3166 /* compare the address against that of the preceding LL */
3167 gen_base_offset_addr(ctx
, addr
, base
, offset
);
3168 tcg_gen_brcond_tl(TCG_COND_EQ
, addr
, cpu_lladdr
, l1
);
3169 tcg_temp_free(addr
);
3170 tcg_gen_movi_tl(t0
, 0);
3171 gen_store_gpr(t0
, rt
);
3175 /* generate cmpxchg */
3176 val
= tcg_temp_new();
3177 gen_load_gpr(val
, rt
);
3178 tcg_gen_atomic_cmpxchg_tl(t0
, cpu_lladdr
, cpu_llval
, val
,
3179 eva
? MIPS_HFLAG_UM
: ctx
->mem_idx
, tcg_mo
);
3180 tcg_gen_setcond_tl(TCG_COND_EQ
, t0
, t0
, cpu_llval
);
3181 gen_store_gpr(t0
, rt
);
3184 gen_set_label(done
);
3189 static void gen_scwp(DisasContext
*ctx
, uint32_t base
, int16_t offset
,
3190 uint32_t reg1
, uint32_t reg2
, bool eva
)
3192 TCGv taddr
= tcg_temp_local_new();
3193 TCGv lladdr
= tcg_temp_local_new();
3194 TCGv_i64 tval
= tcg_temp_new_i64();
3195 TCGv_i64 llval
= tcg_temp_new_i64();
3196 TCGv_i64 val
= tcg_temp_new_i64();
3197 TCGv tmp1
= tcg_temp_new();
3198 TCGv tmp2
= tcg_temp_new();
3199 TCGLabel
*lab_fail
= gen_new_label();
3200 TCGLabel
*lab_done
= gen_new_label();
3202 gen_base_offset_addr(ctx
, taddr
, base
, offset
);
3204 tcg_gen_ld_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3205 tcg_gen_brcond_tl(TCG_COND_NE
, taddr
, lladdr
, lab_fail
);
3207 gen_load_gpr(tmp1
, reg1
);
3208 gen_load_gpr(tmp2
, reg2
);
3210 #ifdef TARGET_WORDS_BIGENDIAN
3211 tcg_gen_concat_tl_i64(tval
, tmp2
, tmp1
);
3213 tcg_gen_concat_tl_i64(tval
, tmp1
, tmp2
);
3216 tcg_gen_ld_i64(llval
, cpu_env
, offsetof(CPUMIPSState
, llval_wp
));
3217 tcg_gen_atomic_cmpxchg_i64(val
, taddr
, llval
, tval
,
3218 eva
? MIPS_HFLAG_UM
: ctx
->mem_idx
, MO_64
);
3220 tcg_gen_movi_tl(cpu_gpr
[reg1
], 1);
3222 tcg_gen_brcond_i64(TCG_COND_EQ
, val
, llval
, lab_done
);
3224 gen_set_label(lab_fail
);
3227 tcg_gen_movi_tl(cpu_gpr
[reg1
], 0);
3229 gen_set_label(lab_done
);
3230 tcg_gen_movi_tl(lladdr
, -1);
3231 tcg_gen_st_tl(lladdr
, cpu_env
, offsetof(CPUMIPSState
, lladdr
));
3234 /* Load and store */
3235 static void gen_flt_ldst(DisasContext
*ctx
, uint32_t opc
, int ft
,
3239 * Don't do NOP if destination is zero: we must perform the actual
3245 TCGv_i32 fp0
= tcg_temp_new_i32();
3246 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
3247 ctx
->default_tcg_memop_mask
);
3248 gen_store_fpr32(ctx
, fp0
, ft
);
3249 tcg_temp_free_i32(fp0
);
3254 TCGv_i32 fp0
= tcg_temp_new_i32();
3255 gen_load_fpr32(ctx
, fp0
, ft
);
3256 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
3257 ctx
->default_tcg_memop_mask
);
3258 tcg_temp_free_i32(fp0
);
3263 TCGv_i64 fp0
= tcg_temp_new_i64();
3264 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3265 ctx
->default_tcg_memop_mask
);
3266 gen_store_fpr64(ctx
, fp0
, ft
);
3267 tcg_temp_free_i64(fp0
);
3272 TCGv_i64 fp0
= tcg_temp_new_i64();
3273 gen_load_fpr64(ctx
, fp0
, ft
);
3274 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
|
3275 ctx
->default_tcg_memop_mask
);
3276 tcg_temp_free_i64(fp0
);
3280 MIPS_INVAL("flt_ldst");
3281 gen_reserved_instruction(ctx
);
3286 static void gen_cop1_ldst(DisasContext
*ctx
, uint32_t op
, int rt
,
3287 int rs
, int16_t imm
)
3289 TCGv t0
= tcg_temp_new();
3291 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
3292 check_cp1_enabled(ctx
);
3296 check_insn(ctx
, ISA_MIPS2
);
3299 gen_base_offset_addr(ctx
, t0
, rs
, imm
);
3300 gen_flt_ldst(ctx
, op
, rt
, t0
);
3303 generate_exception_err(ctx
, EXCP_CpU
, 1);
3308 /* Arithmetic with immediate operand */
3309 static void gen_arith_imm(DisasContext
*ctx
, uint32_t opc
,
3310 int rt
, int rs
, int imm
)
3312 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3314 if (rt
== 0 && opc
!= OPC_ADDI
&& opc
!= OPC_DADDI
) {
3316 * If no destination, treat it as a NOP.
3317 * For addi, we must generate the overflow exception when needed.
3324 TCGv t0
= tcg_temp_local_new();
3325 TCGv t1
= tcg_temp_new();
3326 TCGv t2
= tcg_temp_new();
3327 TCGLabel
*l1
= gen_new_label();
3329 gen_load_gpr(t1
, rs
);
3330 tcg_gen_addi_tl(t0
, t1
, uimm
);
3331 tcg_gen_ext32s_tl(t0
, t0
);
3333 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3334 tcg_gen_xori_tl(t2
, t0
, uimm
);
3335 tcg_gen_and_tl(t1
, t1
, t2
);
3337 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3339 /* operands of same sign, result different sign */
3340 generate_exception(ctx
, EXCP_OVERFLOW
);
3342 tcg_gen_ext32s_tl(t0
, t0
);
3343 gen_store_gpr(t0
, rt
);
3349 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3350 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3352 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3355 #if defined(TARGET_MIPS64)
3358 TCGv t0
= tcg_temp_local_new();
3359 TCGv t1
= tcg_temp_new();
3360 TCGv t2
= tcg_temp_new();
3361 TCGLabel
*l1
= gen_new_label();
3363 gen_load_gpr(t1
, rs
);
3364 tcg_gen_addi_tl(t0
, t1
, uimm
);
3366 tcg_gen_xori_tl(t1
, t1
, ~uimm
);
3367 tcg_gen_xori_tl(t2
, t0
, uimm
);
3368 tcg_gen_and_tl(t1
, t1
, t2
);
3370 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3372 /* operands of same sign, result different sign */
3373 generate_exception(ctx
, EXCP_OVERFLOW
);
3375 gen_store_gpr(t0
, rt
);
3381 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3383 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3390 /* Logic with immediate operand */
3391 static void gen_logic_imm(DisasContext
*ctx
, uint32_t opc
,
3392 int rt
, int rs
, int16_t imm
)
3397 /* If no destination, treat it as a NOP. */
3400 uimm
= (uint16_t)imm
;
3403 if (likely(rs
!= 0)) {
3404 tcg_gen_andi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3406 tcg_gen_movi_tl(cpu_gpr
[rt
], 0);
3411 tcg_gen_ori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3413 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3417 if (likely(rs
!= 0)) {
3418 tcg_gen_xori_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], uimm
);
3420 tcg_gen_movi_tl(cpu_gpr
[rt
], uimm
);
3424 if (rs
!= 0 && (ctx
->insn_flags
& ISA_MIPS_R6
)) {
3426 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
<< 16);
3427 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
3429 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
<< 16);
3438 /* Set on less than with immediate operand */
3439 static void gen_slt_imm(DisasContext
*ctx
, uint32_t opc
,
3440 int rt
, int rs
, int16_t imm
)
3442 target_ulong uimm
= (target_long
)imm
; /* Sign extend to 32/64 bits */
3446 /* If no destination, treat it as a NOP. */
3449 t0
= tcg_temp_new();
3450 gen_load_gpr(t0
, rs
);
3453 tcg_gen_setcondi_tl(TCG_COND_LT
, cpu_gpr
[rt
], t0
, uimm
);
3456 tcg_gen_setcondi_tl(TCG_COND_LTU
, cpu_gpr
[rt
], t0
, uimm
);
3462 /* Shifts with immediate operand */
3463 static void gen_shift_imm(DisasContext
*ctx
, uint32_t opc
,
3464 int rt
, int rs
, int16_t imm
)
3466 target_ulong uimm
= ((uint16_t)imm
) & 0x1f;
3470 /* If no destination, treat it as a NOP. */
3474 t0
= tcg_temp_new();
3475 gen_load_gpr(t0
, rs
);
3478 tcg_gen_shli_tl(t0
, t0
, uimm
);
3479 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3482 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
3486 tcg_gen_ext32u_tl(t0
, t0
);
3487 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
3489 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3494 TCGv_i32 t1
= tcg_temp_new_i32();
3496 tcg_gen_trunc_tl_i32(t1
, t0
);
3497 tcg_gen_rotri_i32(t1
, t1
, uimm
);
3498 tcg_gen_ext_i32_tl(cpu_gpr
[rt
], t1
);
3499 tcg_temp_free_i32(t1
);
3501 tcg_gen_ext32s_tl(cpu_gpr
[rt
], t0
);
3504 #if defined(TARGET_MIPS64)
3506 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
);
3509 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
);
3512 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
);
3516 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
);
3518 tcg_gen_mov_tl(cpu_gpr
[rt
], t0
);
3522 tcg_gen_shli_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3525 tcg_gen_sari_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3528 tcg_gen_shri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3531 tcg_gen_rotri_tl(cpu_gpr
[rt
], t0
, uimm
+ 32);
3539 static void gen_arith(DisasContext
*ctx
, uint32_t opc
,
3540 int rd
, int rs
, int rt
)
3542 if (rd
== 0 && opc
!= OPC_ADD
&& opc
!= OPC_SUB
3543 && opc
!= OPC_DADD
&& opc
!= OPC_DSUB
) {
3545 * If no destination, treat it as a NOP.
3546 * For add & sub, we must generate the overflow exception when needed.
3554 TCGv t0
= tcg_temp_local_new();
3555 TCGv t1
= tcg_temp_new();
3556 TCGv t2
= tcg_temp_new();
3557 TCGLabel
*l1
= gen_new_label();
3559 gen_load_gpr(t1
, rs
);
3560 gen_load_gpr(t2
, rt
);
3561 tcg_gen_add_tl(t0
, t1
, t2
);
3562 tcg_gen_ext32s_tl(t0
, t0
);
3563 tcg_gen_xor_tl(t1
, t1
, t2
);
3564 tcg_gen_xor_tl(t2
, t0
, t2
);
3565 tcg_gen_andc_tl(t1
, t2
, t1
);
3567 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3569 /* operands of same sign, result different sign */
3570 generate_exception(ctx
, EXCP_OVERFLOW
);
3572 gen_store_gpr(t0
, rd
);
3577 if (rs
!= 0 && rt
!= 0) {
3578 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3579 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3580 } else if (rs
== 0 && rt
!= 0) {
3581 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3582 } else if (rs
!= 0 && rt
== 0) {
3583 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3585 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3590 TCGv t0
= tcg_temp_local_new();
3591 TCGv t1
= tcg_temp_new();
3592 TCGv t2
= tcg_temp_new();
3593 TCGLabel
*l1
= gen_new_label();
3595 gen_load_gpr(t1
, rs
);
3596 gen_load_gpr(t2
, rt
);
3597 tcg_gen_sub_tl(t0
, t1
, t2
);
3598 tcg_gen_ext32s_tl(t0
, t0
);
3599 tcg_gen_xor_tl(t2
, t1
, t2
);
3600 tcg_gen_xor_tl(t1
, t0
, t1
);
3601 tcg_gen_and_tl(t1
, t1
, t2
);
3603 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3606 * operands of different sign, first operand and the result
3609 generate_exception(ctx
, EXCP_OVERFLOW
);
3611 gen_store_gpr(t0
, rd
);
3616 if (rs
!= 0 && rt
!= 0) {
3617 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3618 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3619 } else if (rs
== 0 && rt
!= 0) {
3620 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3621 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3622 } else if (rs
!= 0 && rt
== 0) {
3623 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3625 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3628 #if defined(TARGET_MIPS64)
3631 TCGv t0
= tcg_temp_local_new();
3632 TCGv t1
= tcg_temp_new();
3633 TCGv t2
= tcg_temp_new();
3634 TCGLabel
*l1
= gen_new_label();
3636 gen_load_gpr(t1
, rs
);
3637 gen_load_gpr(t2
, rt
);
3638 tcg_gen_add_tl(t0
, t1
, t2
);
3639 tcg_gen_xor_tl(t1
, t1
, t2
);
3640 tcg_gen_xor_tl(t2
, t0
, t2
);
3641 tcg_gen_andc_tl(t1
, t2
, t1
);
3643 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3645 /* operands of same sign, result different sign */
3646 generate_exception(ctx
, EXCP_OVERFLOW
);
3648 gen_store_gpr(t0
, rd
);
3653 if (rs
!= 0 && rt
!= 0) {
3654 tcg_gen_add_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3655 } else if (rs
== 0 && rt
!= 0) {
3656 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3657 } else if (rs
!= 0 && rt
== 0) {
3658 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3660 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3665 TCGv t0
= tcg_temp_local_new();
3666 TCGv t1
= tcg_temp_new();
3667 TCGv t2
= tcg_temp_new();
3668 TCGLabel
*l1
= gen_new_label();
3670 gen_load_gpr(t1
, rs
);
3671 gen_load_gpr(t2
, rt
);
3672 tcg_gen_sub_tl(t0
, t1
, t2
);
3673 tcg_gen_xor_tl(t2
, t1
, t2
);
3674 tcg_gen_xor_tl(t1
, t0
, t1
);
3675 tcg_gen_and_tl(t1
, t1
, t2
);
3677 tcg_gen_brcondi_tl(TCG_COND_GE
, t1
, 0, l1
);
3680 * Operands of different sign, first operand and result different
3683 generate_exception(ctx
, EXCP_OVERFLOW
);
3685 gen_store_gpr(t0
, rd
);
3690 if (rs
!= 0 && rt
!= 0) {
3691 tcg_gen_sub_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3692 } else if (rs
== 0 && rt
!= 0) {
3693 tcg_gen_neg_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3694 } else if (rs
!= 0 && rt
== 0) {
3695 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3697 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3702 if (likely(rs
!= 0 && rt
!= 0)) {
3703 tcg_gen_mul_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3704 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
3706 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3712 /* Conditional move */
3713 static void gen_cond_move(DisasContext
*ctx
, uint32_t opc
,
3714 int rd
, int rs
, int rt
)
3719 /* If no destination, treat it as a NOP. */
3723 t0
= tcg_temp_new();
3724 gen_load_gpr(t0
, rt
);
3725 t1
= tcg_const_tl(0);
3726 t2
= tcg_temp_new();
3727 gen_load_gpr(t2
, rs
);
3730 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
3733 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, cpu_gpr
[rd
]);
3736 tcg_gen_movcond_tl(TCG_COND_NE
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
3739 tcg_gen_movcond_tl(TCG_COND_EQ
, cpu_gpr
[rd
], t0
, t1
, t2
, t1
);
3748 static void gen_logic(DisasContext
*ctx
, uint32_t opc
,
3749 int rd
, int rs
, int rt
)
3752 /* If no destination, treat it as a NOP. */
3758 if (likely(rs
!= 0 && rt
!= 0)) {
3759 tcg_gen_and_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3761 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3765 if (rs
!= 0 && rt
!= 0) {
3766 tcg_gen_nor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3767 } else if (rs
== 0 && rt
!= 0) {
3768 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3769 } else if (rs
!= 0 && rt
== 0) {
3770 tcg_gen_not_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3772 tcg_gen_movi_tl(cpu_gpr
[rd
], ~((target_ulong
)0));
3776 if (likely(rs
!= 0 && rt
!= 0)) {
3777 tcg_gen_or_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3778 } else if (rs
== 0 && rt
!= 0) {
3779 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3780 } else if (rs
!= 0 && rt
== 0) {
3781 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3783 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3787 if (likely(rs
!= 0 && rt
!= 0)) {
3788 tcg_gen_xor_tl(cpu_gpr
[rd
], cpu_gpr
[rs
], cpu_gpr
[rt
]);
3789 } else if (rs
== 0 && rt
!= 0) {
3790 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rt
]);
3791 } else if (rs
!= 0 && rt
== 0) {
3792 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
3794 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
3800 /* Set on lower than */
3801 static void gen_slt(DisasContext
*ctx
, uint32_t opc
,
3802 int rd
, int rs
, int rt
)
3807 /* If no destination, treat it as a NOP. */
3811 t0
= tcg_temp_new();
3812 t1
= tcg_temp_new();
3813 gen_load_gpr(t0
, rs
);
3814 gen_load_gpr(t1
, rt
);
3817 tcg_gen_setcond_tl(TCG_COND_LT
, cpu_gpr
[rd
], t0
, t1
);
3820 tcg_gen_setcond_tl(TCG_COND_LTU
, cpu_gpr
[rd
], t0
, t1
);
3828 static void gen_shift(DisasContext
*ctx
, uint32_t opc
,
3829 int rd
, int rs
, int rt
)
3835 * If no destination, treat it as a NOP.
3836 * For add & sub, we must generate the overflow exception when needed.
3841 t0
= tcg_temp_new();
3842 t1
= tcg_temp_new();
3843 gen_load_gpr(t0
, rs
);
3844 gen_load_gpr(t1
, rt
);
3847 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3848 tcg_gen_shl_tl(t0
, t1
, t0
);
3849 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
3852 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3853 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3856 tcg_gen_ext32u_tl(t1
, t1
);
3857 tcg_gen_andi_tl(t0
, t0
, 0x1f);
3858 tcg_gen_shr_tl(t0
, t1
, t0
);
3859 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
3863 TCGv_i32 t2
= tcg_temp_new_i32();
3864 TCGv_i32 t3
= tcg_temp_new_i32();
3866 tcg_gen_trunc_tl_i32(t2
, t0
);
3867 tcg_gen_trunc_tl_i32(t3
, t1
);
3868 tcg_gen_andi_i32(t2
, t2
, 0x1f);
3869 tcg_gen_rotr_i32(t2
, t3
, t2
);
3870 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
3871 tcg_temp_free_i32(t2
);
3872 tcg_temp_free_i32(t3
);
3875 #if defined(TARGET_MIPS64)
3877 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3878 tcg_gen_shl_tl(cpu_gpr
[rd
], t1
, t0
);
3881 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3882 tcg_gen_sar_tl(cpu_gpr
[rd
], t1
, t0
);
3885 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3886 tcg_gen_shr_tl(cpu_gpr
[rd
], t1
, t0
);
3889 tcg_gen_andi_tl(t0
, t0
, 0x3f);
3890 tcg_gen_rotr_tl(cpu_gpr
[rd
], t1
, t0
);
#if defined(TARGET_MIPS64)
/* Copy GPR to and from TX79 HI1/LO1 register. */
static void gen_HILO1_tx79(DisasContext *ctx, uint32_t opc, int reg)
{
    if (reg == 0 && (opc == MMI_OPC_MFHI1 || opc == MMI_OPC_MFLO1)) {
        /* Treat as NOP. */
        return;
    }

    switch (opc) {
    case MMI_OPC_MFHI1:
        gen_store_gpr(cpu_HI[1], reg);
        break;
    case MMI_OPC_MFLO1:
        gen_store_gpr(cpu_LO[1], reg);
        break;
    case MMI_OPC_MTHI1:
        gen_load_gpr(cpu_HI[1], reg);
        break;
    case MMI_OPC_MTLO1:
        gen_load_gpr(cpu_LO[1], reg);
        break;
    default:
        MIPS_INVAL("mfthilo1 TX79");
        gen_reserved_instruction(ctx);
        break;
    }
}
#endif
3923 /* Arithmetic on HI/LO registers */
3924 static void gen_HILO(DisasContext
*ctx
, uint32_t opc
, int acc
, int reg
)
3926 if (reg
== 0 && (opc
== OPC_MFHI
|| opc
== OPC_MFLO
)) {
3937 #if defined(TARGET_MIPS64)
3939 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3943 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_HI
[acc
]);
3947 #if defined(TARGET_MIPS64)
3949 tcg_gen_ext32s_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3953 tcg_gen_mov_tl(cpu_gpr
[reg
], cpu_LO
[acc
]);
3958 #if defined(TARGET_MIPS64)
3960 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3964 tcg_gen_mov_tl(cpu_HI
[acc
], cpu_gpr
[reg
]);
3967 tcg_gen_movi_tl(cpu_HI
[acc
], 0);
3972 #if defined(TARGET_MIPS64)
3974 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3978 tcg_gen_mov_tl(cpu_LO
[acc
], cpu_gpr
[reg
]);
3981 tcg_gen_movi_tl(cpu_LO
[acc
], 0);
3987 static inline void gen_r6_ld(target_long addr
, int reg
, int memidx
,
3990 TCGv t0
= tcg_const_tl(addr
);
3991 tcg_gen_qemu_ld_tl(t0
, t0
, memidx
, memop
);
3992 gen_store_gpr(t0
, reg
);
3996 static inline void gen_pcrel(DisasContext
*ctx
, int opc
, target_ulong pc
,
4002 switch (MASK_OPC_PCREL_TOP2BITS(opc
)) {
4005 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4006 addr
= addr_add(ctx
, pc
, offset
);
4007 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4011 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4012 addr
= addr_add(ctx
, pc
, offset
);
4013 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TESL
);
4015 #if defined(TARGET_MIPS64)
4018 offset
= sextract32(ctx
->opcode
<< 2, 0, 21);
4019 addr
= addr_add(ctx
, pc
, offset
);
4020 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEUL
);
4024 switch (MASK_OPC_PCREL_TOP5BITS(opc
)) {
4027 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4028 addr
= addr_add(ctx
, pc
, offset
);
4029 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4034 offset
= sextract32(ctx
->opcode
, 0, 16) << 16;
4035 addr
= ~0xFFFF & addr_add(ctx
, pc
, offset
);
4036 tcg_gen_movi_tl(cpu_gpr
[rs
], addr
);
4039 #if defined(TARGET_MIPS64)
4040 case R6_OPC_LDPC
: /* bits 16 and 17 are part of immediate */
4041 case R6_OPC_LDPC
+ (1 << 16):
4042 case R6_OPC_LDPC
+ (2 << 16):
4043 case R6_OPC_LDPC
+ (3 << 16):
4045 offset
= sextract32(ctx
->opcode
<< 3, 0, 21);
4046 addr
= addr_add(ctx
, (pc
& ~0x7), offset
);
4047 gen_r6_ld(addr
, rs
, ctx
->mem_idx
, MO_TEQ
);
4051 MIPS_INVAL("OPC_PCREL");
4052 gen_reserved_instruction(ctx
);
4059 static void gen_r6_muldiv(DisasContext
*ctx
, int opc
, int rd
, int rs
, int rt
)
4068 t0
= tcg_temp_new();
4069 t1
= tcg_temp_new();
4071 gen_load_gpr(t0
, rs
);
4072 gen_load_gpr(t1
, rt
);
4077 TCGv t2
= tcg_temp_new();
4078 TCGv t3
= tcg_temp_new();
4079 tcg_gen_ext32s_tl(t0
, t0
);
4080 tcg_gen_ext32s_tl(t1
, t1
);
4081 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4082 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4083 tcg_gen_and_tl(t2
, t2
, t3
);
4084 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4085 tcg_gen_or_tl(t2
, t2
, t3
);
4086 tcg_gen_movi_tl(t3
, 0);
4087 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4088 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4089 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4096 TCGv t2
= tcg_temp_new();
4097 TCGv t3
= tcg_temp_new();
4098 tcg_gen_ext32s_tl(t0
, t0
);
4099 tcg_gen_ext32s_tl(t1
, t1
);
4100 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4101 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4102 tcg_gen_and_tl(t2
, t2
, t3
);
4103 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4104 tcg_gen_or_tl(t2
, t2
, t3
);
4105 tcg_gen_movi_tl(t3
, 0);
4106 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4107 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4108 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4115 TCGv t2
= tcg_const_tl(0);
4116 TCGv t3
= tcg_const_tl(1);
4117 tcg_gen_ext32u_tl(t0
, t0
);
4118 tcg_gen_ext32u_tl(t1
, t1
);
4119 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4120 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
4121 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4128 TCGv t2
= tcg_const_tl(0);
4129 TCGv t3
= tcg_const_tl(1);
4130 tcg_gen_ext32u_tl(t0
, t0
);
4131 tcg_gen_ext32u_tl(t1
, t1
);
4132 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4133 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
4134 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4141 TCGv_i32 t2
= tcg_temp_new_i32();
4142 TCGv_i32 t3
= tcg_temp_new_i32();
4143 tcg_gen_trunc_tl_i32(t2
, t0
);
4144 tcg_gen_trunc_tl_i32(t3
, t1
);
4145 tcg_gen_mul_i32(t2
, t2
, t3
);
4146 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4147 tcg_temp_free_i32(t2
);
4148 tcg_temp_free_i32(t3
);
4153 TCGv_i32 t2
= tcg_temp_new_i32();
4154 TCGv_i32 t3
= tcg_temp_new_i32();
4155 tcg_gen_trunc_tl_i32(t2
, t0
);
4156 tcg_gen_trunc_tl_i32(t3
, t1
);
4157 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4158 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4159 tcg_temp_free_i32(t2
);
4160 tcg_temp_free_i32(t3
);
4165 TCGv_i32 t2
= tcg_temp_new_i32();
4166 TCGv_i32 t3
= tcg_temp_new_i32();
4167 tcg_gen_trunc_tl_i32(t2
, t0
);
4168 tcg_gen_trunc_tl_i32(t3
, t1
);
4169 tcg_gen_mul_i32(t2
, t2
, t3
);
4170 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4171 tcg_temp_free_i32(t2
);
4172 tcg_temp_free_i32(t3
);
4177 TCGv_i32 t2
= tcg_temp_new_i32();
4178 TCGv_i32 t3
= tcg_temp_new_i32();
4179 tcg_gen_trunc_tl_i32(t2
, t0
);
4180 tcg_gen_trunc_tl_i32(t3
, t1
);
4181 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4182 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t3
);
4183 tcg_temp_free_i32(t2
);
4184 tcg_temp_free_i32(t3
);
4187 #if defined(TARGET_MIPS64)
4190 TCGv t2
= tcg_temp_new();
4191 TCGv t3
= tcg_temp_new();
4192 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4193 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4194 tcg_gen_and_tl(t2
, t2
, t3
);
4195 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4196 tcg_gen_or_tl(t2
, t2
, t3
);
4197 tcg_gen_movi_tl(t3
, 0);
4198 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4199 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4206 TCGv t2
= tcg_temp_new();
4207 TCGv t3
= tcg_temp_new();
4208 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4209 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4210 tcg_gen_and_tl(t2
, t2
, t3
);
4211 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4212 tcg_gen_or_tl(t2
, t2
, t3
);
4213 tcg_gen_movi_tl(t3
, 0);
4214 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4215 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4222 TCGv t2
= tcg_const_tl(0);
4223 TCGv t3
= tcg_const_tl(1);
4224 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4225 tcg_gen_divu_i64(cpu_gpr
[rd
], t0
, t1
);
4232 TCGv t2
= tcg_const_tl(0);
4233 TCGv t3
= tcg_const_tl(1);
4234 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4235 tcg_gen_remu_i64(cpu_gpr
[rd
], t0
, t1
);
4241 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4245 TCGv t2
= tcg_temp_new();
4246 tcg_gen_muls2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4251 tcg_gen_mul_i64(cpu_gpr
[rd
], t0
, t1
);
4255 TCGv t2
= tcg_temp_new();
4256 tcg_gen_mulu2_i64(t2
, cpu_gpr
[rd
], t0
, t1
);
4262 MIPS_INVAL("r6 mul/div");
4263 gen_reserved_instruction(ctx
);
#if defined(TARGET_MIPS64)
/*
 * TX79 DIV1/DIVU1: 32-bit divide into the second HI/LO pair (HI1/LO1),
 * with the same divide-by-zero / INT_MIN-overflow guarding as gen_r6_muldiv.
 */
static void gen_div1_tx79(DisasContext *ctx, uint32_t opc, int rs, int rt)
{
    TCGv t0, t1;

    t0 = tcg_temp_new();
    t1 = tcg_temp_new();

    gen_load_gpr(t0, rs);
    gen_load_gpr(t1, rt);

    switch (opc) {
    case MMI_OPC_DIV1:
        {
            TCGv t2 = tcg_temp_new();
            TCGv t3 = tcg_temp_new();
            tcg_gen_ext32s_tl(t0, t0);
            tcg_gen_ext32s_tl(t1, t1);
            tcg_gen_setcondi_tl(TCG_COND_EQ, t2, t0, INT_MIN);
            tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, -1);
            tcg_gen_and_tl(t2, t2, t3);
            tcg_gen_setcondi_tl(TCG_COND_EQ, t3, t1, 0);
            tcg_gen_or_tl(t2, t2, t3);
            tcg_gen_movi_tl(t3, 0);
            tcg_gen_movcond_tl(TCG_COND_NE, t1, t2, t3, t2, t1);
            tcg_gen_div_tl(cpu_LO[1], t0, t1);
            tcg_gen_rem_tl(cpu_HI[1], t0, t1);
            tcg_gen_ext32s_tl(cpu_LO[1], cpu_LO[1]);
            tcg_gen_ext32s_tl(cpu_HI[1], cpu_HI[1]);
            tcg_temp_free(t3);
            tcg_temp_free(t2);
        }
        break;
    case MMI_OPC_DIVU1:
        {
            TCGv t2 = tcg_const_tl(0);
            TCGv t3 = tcg_const_tl(1);
            tcg_gen_ext32u_tl(t0, t0);
            tcg_gen_ext32u_tl(t1, t1);
            tcg_gen_movcond_tl(TCG_COND_EQ, t1, t1, t2, t3, t1);
            tcg_gen_divu_tl(cpu_LO[1], t0, t1);
            tcg_gen_remu_tl(cpu_HI[1], t0, t1);
            tcg_gen_ext32s_tl(cpu_LO[1], cpu_LO[1]);
            tcg_gen_ext32s_tl(cpu_HI[1], cpu_HI[1]);
            tcg_temp_free(t3);
            tcg_temp_free(t2);
        }
        break;
    default:
        MIPS_INVAL("div1 TX79");
        gen_reserved_instruction(ctx);
        goto out;
    }
 out:
    tcg_temp_free(t0);
    tcg_temp_free(t1);
}
#endif
4330 static void gen_muldiv(DisasContext
*ctx
, uint32_t opc
,
4331 int acc
, int rs
, int rt
)
4335 t0
= tcg_temp_new();
4336 t1
= tcg_temp_new();
4338 gen_load_gpr(t0
, rs
);
4339 gen_load_gpr(t1
, rt
);
4348 TCGv t2
= tcg_temp_new();
4349 TCGv t3
= tcg_temp_new();
4350 tcg_gen_ext32s_tl(t0
, t0
);
4351 tcg_gen_ext32s_tl(t1
, t1
);
4352 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, INT_MIN
);
4353 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1);
4354 tcg_gen_and_tl(t2
, t2
, t3
);
4355 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4356 tcg_gen_or_tl(t2
, t2
, t3
);
4357 tcg_gen_movi_tl(t3
, 0);
4358 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4359 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4360 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4361 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4362 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4369 TCGv t2
= tcg_const_tl(0);
4370 TCGv t3
= tcg_const_tl(1);
4371 tcg_gen_ext32u_tl(t0
, t0
);
4372 tcg_gen_ext32u_tl(t1
, t1
);
4373 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4374 tcg_gen_divu_tl(cpu_LO
[acc
], t0
, t1
);
4375 tcg_gen_remu_tl(cpu_HI
[acc
], t0
, t1
);
4376 tcg_gen_ext32s_tl(cpu_LO
[acc
], cpu_LO
[acc
]);
4377 tcg_gen_ext32s_tl(cpu_HI
[acc
], cpu_HI
[acc
]);
4384 TCGv_i32 t2
= tcg_temp_new_i32();
4385 TCGv_i32 t3
= tcg_temp_new_i32();
4386 tcg_gen_trunc_tl_i32(t2
, t0
);
4387 tcg_gen_trunc_tl_i32(t3
, t1
);
4388 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4389 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4390 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4391 tcg_temp_free_i32(t2
);
4392 tcg_temp_free_i32(t3
);
4397 TCGv_i32 t2
= tcg_temp_new_i32();
4398 TCGv_i32 t3
= tcg_temp_new_i32();
4399 tcg_gen_trunc_tl_i32(t2
, t0
);
4400 tcg_gen_trunc_tl_i32(t3
, t1
);
4401 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4402 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4403 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4404 tcg_temp_free_i32(t2
);
4405 tcg_temp_free_i32(t3
);
4408 #if defined(TARGET_MIPS64)
4411 TCGv t2
= tcg_temp_new();
4412 TCGv t3
= tcg_temp_new();
4413 tcg_gen_setcondi_tl(TCG_COND_EQ
, t2
, t0
, -1LL << 63);
4414 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, -1LL);
4415 tcg_gen_and_tl(t2
, t2
, t3
);
4416 tcg_gen_setcondi_tl(TCG_COND_EQ
, t3
, t1
, 0);
4417 tcg_gen_or_tl(t2
, t2
, t3
);
4418 tcg_gen_movi_tl(t3
, 0);
4419 tcg_gen_movcond_tl(TCG_COND_NE
, t1
, t2
, t3
, t2
, t1
);
4420 tcg_gen_div_tl(cpu_LO
[acc
], t0
, t1
);
4421 tcg_gen_rem_tl(cpu_HI
[acc
], t0
, t1
);
4428 TCGv t2
= tcg_const_tl(0);
4429 TCGv t3
= tcg_const_tl(1);
4430 tcg_gen_movcond_tl(TCG_COND_EQ
, t1
, t1
, t2
, t3
, t1
);
4431 tcg_gen_divu_i64(cpu_LO
[acc
], t0
, t1
);
4432 tcg_gen_remu_i64(cpu_HI
[acc
], t0
, t1
);
4438 tcg_gen_muls2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4441 tcg_gen_mulu2_i64(cpu_LO
[acc
], cpu_HI
[acc
], t0
, t1
);
4446 TCGv_i64 t2
= tcg_temp_new_i64();
4447 TCGv_i64 t3
= tcg_temp_new_i64();
4449 tcg_gen_ext_tl_i64(t2
, t0
);
4450 tcg_gen_ext_tl_i64(t3
, t1
);
4451 tcg_gen_mul_i64(t2
, t2
, t3
);
4452 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4453 tcg_gen_add_i64(t2
, t2
, t3
);
4454 tcg_temp_free_i64(t3
);
4455 gen_move_low32(cpu_LO
[acc
], t2
);
4456 gen_move_high32(cpu_HI
[acc
], t2
);
4457 tcg_temp_free_i64(t2
);
4462 TCGv_i64 t2
= tcg_temp_new_i64();
4463 TCGv_i64 t3
= tcg_temp_new_i64();
4465 tcg_gen_ext32u_tl(t0
, t0
);
4466 tcg_gen_ext32u_tl(t1
, t1
);
4467 tcg_gen_extu_tl_i64(t2
, t0
);
4468 tcg_gen_extu_tl_i64(t3
, t1
);
4469 tcg_gen_mul_i64(t2
, t2
, t3
);
4470 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4471 tcg_gen_add_i64(t2
, t2
, t3
);
4472 tcg_temp_free_i64(t3
);
4473 gen_move_low32(cpu_LO
[acc
], t2
);
4474 gen_move_high32(cpu_HI
[acc
], t2
);
4475 tcg_temp_free_i64(t2
);
4480 TCGv_i64 t2
= tcg_temp_new_i64();
4481 TCGv_i64 t3
= tcg_temp_new_i64();
4483 tcg_gen_ext_tl_i64(t2
, t0
);
4484 tcg_gen_ext_tl_i64(t3
, t1
);
4485 tcg_gen_mul_i64(t2
, t2
, t3
);
4486 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4487 tcg_gen_sub_i64(t2
, t3
, t2
);
4488 tcg_temp_free_i64(t3
);
4489 gen_move_low32(cpu_LO
[acc
], t2
);
4490 gen_move_high32(cpu_HI
[acc
], t2
);
4491 tcg_temp_free_i64(t2
);
4496 TCGv_i64 t2
= tcg_temp_new_i64();
4497 TCGv_i64 t3
= tcg_temp_new_i64();
4499 tcg_gen_ext32u_tl(t0
, t0
);
4500 tcg_gen_ext32u_tl(t1
, t1
);
4501 tcg_gen_extu_tl_i64(t2
, t0
);
4502 tcg_gen_extu_tl_i64(t3
, t1
);
4503 tcg_gen_mul_i64(t2
, t2
, t3
);
4504 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4505 tcg_gen_sub_i64(t2
, t3
, t2
);
4506 tcg_temp_free_i64(t3
);
4507 gen_move_low32(cpu_LO
[acc
], t2
);
4508 gen_move_high32(cpu_HI
[acc
], t2
);
4509 tcg_temp_free_i64(t2
);
4513 MIPS_INVAL("mul/div");
4514 gen_reserved_instruction(ctx
);
4523 * These MULT[U] and MADD[U] instructions implemented in for example
4524 * the Toshiba/Sony R5900 and the Toshiba TX19, TX39 and TX79 core
4525 * architectures are special three-operand variants with the syntax
4527 * MULT[U][1] rd, rs, rt
4531 * (rd, LO, HI) <- rs * rt
4535 * MADD[U][1] rd, rs, rt
4539 * (rd, LO, HI) <- (LO, HI) + rs * rt
4541 * where the low-order 32-bits of the result is placed into both the
4542 * GPR rd and the special register LO. The high-order 32-bits of the
4543 * result is placed into the special register HI.
4545 * If the GPR rd is omitted in assembly language, it is taken to be 0,
4546 * which is the zero register that always reads as 0.
4548 static void gen_mul_txx9(DisasContext
*ctx
, uint32_t opc
,
4549 int rd
, int rs
, int rt
)
4551 TCGv t0
= tcg_temp_new();
4552 TCGv t1
= tcg_temp_new();
4555 gen_load_gpr(t0
, rs
);
4556 gen_load_gpr(t1
, rt
);
4564 TCGv_i32 t2
= tcg_temp_new_i32();
4565 TCGv_i32 t3
= tcg_temp_new_i32();
4566 tcg_gen_trunc_tl_i32(t2
, t0
);
4567 tcg_gen_trunc_tl_i32(t3
, t1
);
4568 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
4570 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4572 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4573 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4574 tcg_temp_free_i32(t2
);
4575 tcg_temp_free_i32(t3
);
4578 case MMI_OPC_MULTU1
:
4583 TCGv_i32 t2
= tcg_temp_new_i32();
4584 TCGv_i32 t3
= tcg_temp_new_i32();
4585 tcg_gen_trunc_tl_i32(t2
, t0
);
4586 tcg_gen_trunc_tl_i32(t3
, t1
);
4587 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
4589 tcg_gen_ext_i32_tl(cpu_gpr
[rd
], t2
);
4591 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
4592 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
4593 tcg_temp_free_i32(t2
);
4594 tcg_temp_free_i32(t3
);
4602 TCGv_i64 t2
= tcg_temp_new_i64();
4603 TCGv_i64 t3
= tcg_temp_new_i64();
4605 tcg_gen_ext_tl_i64(t2
, t0
);
4606 tcg_gen_ext_tl_i64(t3
, t1
);
4607 tcg_gen_mul_i64(t2
, t2
, t3
);
4608 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4609 tcg_gen_add_i64(t2
, t2
, t3
);
4610 tcg_temp_free_i64(t3
);
4611 gen_move_low32(cpu_LO
[acc
], t2
);
4612 gen_move_high32(cpu_HI
[acc
], t2
);
4614 gen_move_low32(cpu_gpr
[rd
], t2
);
4616 tcg_temp_free_i64(t2
);
4619 case MMI_OPC_MADDU1
:
4624 TCGv_i64 t2
= tcg_temp_new_i64();
4625 TCGv_i64 t3
= tcg_temp_new_i64();
4627 tcg_gen_ext32u_tl(t0
, t0
);
4628 tcg_gen_ext32u_tl(t1
, t1
);
4629 tcg_gen_extu_tl_i64(t2
, t0
);
4630 tcg_gen_extu_tl_i64(t3
, t1
);
4631 tcg_gen_mul_i64(t2
, t2
, t3
);
4632 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
4633 tcg_gen_add_i64(t2
, t2
, t3
);
4634 tcg_temp_free_i64(t3
);
4635 gen_move_low32(cpu_LO
[acc
], t2
);
4636 gen_move_high32(cpu_HI
[acc
], t2
);
4638 gen_move_low32(cpu_gpr
[rd
], t2
);
4640 tcg_temp_free_i64(t2
);
4644 MIPS_INVAL("mul/madd TXx9");
4645 gen_reserved_instruction(ctx
);
4654 static void gen_mul_vr54xx(DisasContext
*ctx
, uint32_t opc
,
4655 int rd
, int rs
, int rt
)
4657 TCGv t0
= tcg_temp_new();
4658 TCGv t1
= tcg_temp_new();
4660 gen_load_gpr(t0
, rs
);
4661 gen_load_gpr(t1
, rt
);
4664 case OPC_VR54XX_MULS
:
4665 gen_helper_muls(t0
, cpu_env
, t0
, t1
);
4667 case OPC_VR54XX_MULSU
:
4668 gen_helper_mulsu(t0
, cpu_env
, t0
, t1
);
4670 case OPC_VR54XX_MACC
:
4671 gen_helper_macc(t0
, cpu_env
, t0
, t1
);
4673 case OPC_VR54XX_MACCU
:
4674 gen_helper_maccu(t0
, cpu_env
, t0
, t1
);
4676 case OPC_VR54XX_MSAC
:
4677 gen_helper_msac(t0
, cpu_env
, t0
, t1
);
4679 case OPC_VR54XX_MSACU
:
4680 gen_helper_msacu(t0
, cpu_env
, t0
, t1
);
4682 case OPC_VR54XX_MULHI
:
4683 gen_helper_mulhi(t0
, cpu_env
, t0
, t1
);
4685 case OPC_VR54XX_MULHIU
:
4686 gen_helper_mulhiu(t0
, cpu_env
, t0
, t1
);
4688 case OPC_VR54XX_MULSHI
:
4689 gen_helper_mulshi(t0
, cpu_env
, t0
, t1
);
4691 case OPC_VR54XX_MULSHIU
:
4692 gen_helper_mulshiu(t0
, cpu_env
, t0
, t1
);
4694 case OPC_VR54XX_MACCHI
:
4695 gen_helper_macchi(t0
, cpu_env
, t0
, t1
);
4697 case OPC_VR54XX_MACCHIU
:
4698 gen_helper_macchiu(t0
, cpu_env
, t0
, t1
);
4700 case OPC_VR54XX_MSACHI
:
4701 gen_helper_msachi(t0
, cpu_env
, t0
, t1
);
4703 case OPC_VR54XX_MSACHIU
:
4704 gen_helper_msachiu(t0
, cpu_env
, t0
, t1
);
4707 MIPS_INVAL("mul vr54xx");
4708 gen_reserved_instruction(ctx
);
4711 gen_store_gpr(t0
, rd
);
4718 static void gen_cl(DisasContext
*ctx
, uint32_t opc
,
4728 gen_load_gpr(t0
, rs
);
4733 #if defined(TARGET_MIPS64)
4737 tcg_gen_not_tl(t0
, t0
);
4746 tcg_gen_ext32u_tl(t0
, t0
);
4747 tcg_gen_clzi_tl(t0
, t0
, TARGET_LONG_BITS
);
4748 tcg_gen_subi_tl(t0
, t0
, TARGET_LONG_BITS
- 32);
4750 #if defined(TARGET_MIPS64)
4755 tcg_gen_clzi_i64(t0
, t0
, 64);
4761 /* Godson integer instructions */
4762 static void gen_loongson_integer(DisasContext
*ctx
, uint32_t opc
,
4763 int rd
, int rs
, int rt
)
4775 case OPC_MULTU_G_2E
:
4776 case OPC_MULTU_G_2F
:
4777 #if defined(TARGET_MIPS64)
4778 case OPC_DMULT_G_2E
:
4779 case OPC_DMULT_G_2F
:
4780 case OPC_DMULTU_G_2E
:
4781 case OPC_DMULTU_G_2F
:
4783 t0
= tcg_temp_new();
4784 t1
= tcg_temp_new();
4787 t0
= tcg_temp_local_new();
4788 t1
= tcg_temp_local_new();
4792 gen_load_gpr(t0
, rs
);
4793 gen_load_gpr(t1
, rt
);
4798 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
4799 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4801 case OPC_MULTU_G_2E
:
4802 case OPC_MULTU_G_2F
:
4803 tcg_gen_ext32u_tl(t0
, t0
);
4804 tcg_gen_ext32u_tl(t1
, t1
);
4805 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
4806 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4811 TCGLabel
*l1
= gen_new_label();
4812 TCGLabel
*l2
= gen_new_label();
4813 TCGLabel
*l3
= gen_new_label();
4814 tcg_gen_ext32s_tl(t0
, t0
);
4815 tcg_gen_ext32s_tl(t1
, t1
);
4816 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
4817 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4820 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
4821 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
4822 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
4825 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4826 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4833 TCGLabel
*l1
= gen_new_label();
4834 TCGLabel
*l2
= gen_new_label();
4835 tcg_gen_ext32u_tl(t0
, t0
);
4836 tcg_gen_ext32u_tl(t1
, t1
);
4837 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
4838 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4841 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
4842 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4849 TCGLabel
*l1
= gen_new_label();
4850 TCGLabel
*l2
= gen_new_label();
4851 TCGLabel
*l3
= gen_new_label();
4852 tcg_gen_ext32u_tl(t0
, t0
);
4853 tcg_gen_ext32u_tl(t1
, t1
);
4854 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
4855 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, INT_MIN
, l2
);
4856 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1, l2
);
4858 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4861 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4862 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4869 TCGLabel
*l1
= gen_new_label();
4870 TCGLabel
*l2
= gen_new_label();
4871 tcg_gen_ext32u_tl(t0
, t0
);
4872 tcg_gen_ext32u_tl(t1
, t1
);
4873 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
4874 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4877 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
4878 tcg_gen_ext32s_tl(cpu_gpr
[rd
], cpu_gpr
[rd
]);
4882 #if defined(TARGET_MIPS64)
4883 case OPC_DMULT_G_2E
:
4884 case OPC_DMULT_G_2F
:
4885 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
4887 case OPC_DMULTU_G_2E
:
4888 case OPC_DMULTU_G_2F
:
4889 tcg_gen_mul_tl(cpu_gpr
[rd
], t0
, t1
);
4894 TCGLabel
*l1
= gen_new_label();
4895 TCGLabel
*l2
= gen_new_label();
4896 TCGLabel
*l3
= gen_new_label();
4897 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
4898 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4901 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
4902 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
4903 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
4906 tcg_gen_div_tl(cpu_gpr
[rd
], t0
, t1
);
4910 case OPC_DDIVU_G_2E
:
4911 case OPC_DDIVU_G_2F
:
4913 TCGLabel
*l1
= gen_new_label();
4914 TCGLabel
*l2
= gen_new_label();
4915 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
4916 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4919 tcg_gen_divu_tl(cpu_gpr
[rd
], t0
, t1
);
4926 TCGLabel
*l1
= gen_new_label();
4927 TCGLabel
*l2
= gen_new_label();
4928 TCGLabel
*l3
= gen_new_label();
4929 tcg_gen_brcondi_tl(TCG_COND_EQ
, t1
, 0, l1
);
4930 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, -1LL << 63, l2
);
4931 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, -1LL, l2
);
4933 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4936 tcg_gen_rem_tl(cpu_gpr
[rd
], t0
, t1
);
4940 case OPC_DMODU_G_2E
:
4941 case OPC_DMODU_G_2F
:
4943 TCGLabel
*l1
= gen_new_label();
4944 TCGLabel
*l2
= gen_new_label();
4945 tcg_gen_brcondi_tl(TCG_COND_NE
, t1
, 0, l1
);
4946 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
4949 tcg_gen_remu_tl(cpu_gpr
[rd
], t0
, t1
);
4960 /* Loongson multimedia instructions */
4961 static void gen_loongson_multimedia(DisasContext
*ctx
, int rd
, int rs
, int rt
)
4963 uint32_t opc
, shift_max
;
4967 opc
= MASK_LMMI(ctx
->opcode
);
4973 t0
= tcg_temp_local_new_i64();
4974 t1
= tcg_temp_local_new_i64();
4977 t0
= tcg_temp_new_i64();
4978 t1
= tcg_temp_new_i64();
4982 check_cp1_enabled(ctx
);
4983 gen_load_fpr64(ctx
, t0
, rs
);
4984 gen_load_fpr64(ctx
, t1
, rt
);
4988 gen_helper_paddsh(t0
, t0
, t1
);
4991 gen_helper_paddush(t0
, t0
, t1
);
4994 gen_helper_paddh(t0
, t0
, t1
);
4997 gen_helper_paddw(t0
, t0
, t1
);
5000 gen_helper_paddsb(t0
, t0
, t1
);
5003 gen_helper_paddusb(t0
, t0
, t1
);
5006 gen_helper_paddb(t0
, t0
, t1
);
5010 gen_helper_psubsh(t0
, t0
, t1
);
5013 gen_helper_psubush(t0
, t0
, t1
);
5016 gen_helper_psubh(t0
, t0
, t1
);
5019 gen_helper_psubw(t0
, t0
, t1
);
5022 gen_helper_psubsb(t0
, t0
, t1
);
5025 gen_helper_psubusb(t0
, t0
, t1
);
5028 gen_helper_psubb(t0
, t0
, t1
);
5032 gen_helper_pshufh(t0
, t0
, t1
);
5035 gen_helper_packsswh(t0
, t0
, t1
);
5038 gen_helper_packsshb(t0
, t0
, t1
);
5041 gen_helper_packushb(t0
, t0
, t1
);
5045 gen_helper_punpcklhw(t0
, t0
, t1
);
5048 gen_helper_punpckhhw(t0
, t0
, t1
);
5051 gen_helper_punpcklbh(t0
, t0
, t1
);
5054 gen_helper_punpckhbh(t0
, t0
, t1
);
5057 gen_helper_punpcklwd(t0
, t0
, t1
);
5060 gen_helper_punpckhwd(t0
, t0
, t1
);
5064 gen_helper_pavgh(t0
, t0
, t1
);
5067 gen_helper_pavgb(t0
, t0
, t1
);
5070 gen_helper_pmaxsh(t0
, t0
, t1
);
5073 gen_helper_pminsh(t0
, t0
, t1
);
5076 gen_helper_pmaxub(t0
, t0
, t1
);
5079 gen_helper_pminub(t0
, t0
, t1
);
5083 gen_helper_pcmpeqw(t0
, t0
, t1
);
5086 gen_helper_pcmpgtw(t0
, t0
, t1
);
5089 gen_helper_pcmpeqh(t0
, t0
, t1
);
5092 gen_helper_pcmpgth(t0
, t0
, t1
);
5095 gen_helper_pcmpeqb(t0
, t0
, t1
);
5098 gen_helper_pcmpgtb(t0
, t0
, t1
);
5102 gen_helper_psllw(t0
, t0
, t1
);
5105 gen_helper_psllh(t0
, t0
, t1
);
5108 gen_helper_psrlw(t0
, t0
, t1
);
5111 gen_helper_psrlh(t0
, t0
, t1
);
5114 gen_helper_psraw(t0
, t0
, t1
);
5117 gen_helper_psrah(t0
, t0
, t1
);
5121 gen_helper_pmullh(t0
, t0
, t1
);
5124 gen_helper_pmulhh(t0
, t0
, t1
);
5127 gen_helper_pmulhuh(t0
, t0
, t1
);
5130 gen_helper_pmaddhw(t0
, t0
, t1
);
5134 gen_helper_pasubub(t0
, t0
, t1
);
5137 gen_helper_biadd(t0
, t0
);
5140 gen_helper_pmovmskb(t0
, t0
);
5144 tcg_gen_add_i64(t0
, t0
, t1
);
5147 tcg_gen_sub_i64(t0
, t0
, t1
);
5150 tcg_gen_xor_i64(t0
, t0
, t1
);
5153 tcg_gen_nor_i64(t0
, t0
, t1
);
5156 tcg_gen_and_i64(t0
, t0
, t1
);
5159 tcg_gen_or_i64(t0
, t0
, t1
);
5163 tcg_gen_andc_i64(t0
, t1
, t0
);
5167 tcg_gen_deposit_i64(t0
, t0
, t1
, 0, 16);
5170 tcg_gen_deposit_i64(t0
, t0
, t1
, 16, 16);
5173 tcg_gen_deposit_i64(t0
, t0
, t1
, 32, 16);
5176 tcg_gen_deposit_i64(t0
, t0
, t1
, 48, 16);
5180 tcg_gen_andi_i64(t1
, t1
, 3);
5181 tcg_gen_shli_i64(t1
, t1
, 4);
5182 tcg_gen_shr_i64(t0
, t0
, t1
);
5183 tcg_gen_ext16u_i64(t0
, t0
);
5187 tcg_gen_add_i64(t0
, t0
, t1
);
5188 tcg_gen_ext32s_i64(t0
, t0
);
5191 tcg_gen_sub_i64(t0
, t0
, t1
);
5192 tcg_gen_ext32s_i64(t0
, t0
);
5214 /* Make sure shift count isn't TCG undefined behaviour. */
5215 tcg_gen_andi_i64(t1
, t1
, shift_max
- 1);
5220 tcg_gen_shl_i64(t0
, t0
, t1
);
5225 * Since SRA is UndefinedResult without sign-extended inputs,
5226 * we can treat SRA and DSRA the same.
5228 tcg_gen_sar_i64(t0
, t0
, t1
);
5231 /* We want to shift in zeros for SRL; zero-extend first. */
5232 tcg_gen_ext32u_i64(t0
, t0
);
5235 tcg_gen_shr_i64(t0
, t0
, t1
);
5239 if (shift_max
== 32) {
5240 tcg_gen_ext32s_i64(t0
, t0
);
5243 /* Shifts larger than MAX produce zero. */
5244 tcg_gen_setcondi_i64(TCG_COND_LTU
, t1
, t1
, shift_max
);
5245 tcg_gen_neg_i64(t1
, t1
);
5246 tcg_gen_and_i64(t0
, t0
, t1
);
5252 TCGv_i64 t2
= tcg_temp_new_i64();
5253 TCGLabel
*lab
= gen_new_label();
5255 tcg_gen_mov_i64(t2
, t0
);
5256 tcg_gen_add_i64(t0
, t1
, t2
);
5257 if (opc
== OPC_ADD_CP2
) {
5258 tcg_gen_ext32s_i64(t0
, t0
);
5260 tcg_gen_xor_i64(t1
, t1
, t2
);
5261 tcg_gen_xor_i64(t2
, t2
, t0
);
5262 tcg_gen_andc_i64(t1
, t2
, t1
);
5263 tcg_temp_free_i64(t2
);
5264 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5265 generate_exception(ctx
, EXCP_OVERFLOW
);
5273 TCGv_i64 t2
= tcg_temp_new_i64();
5274 TCGLabel
*lab
= gen_new_label();
5276 tcg_gen_mov_i64(t2
, t0
);
5277 tcg_gen_sub_i64(t0
, t1
, t2
);
5278 if (opc
== OPC_SUB_CP2
) {
5279 tcg_gen_ext32s_i64(t0
, t0
);
5281 tcg_gen_xor_i64(t1
, t1
, t2
);
5282 tcg_gen_xor_i64(t2
, t2
, t0
);
5283 tcg_gen_and_i64(t1
, t1
, t2
);
5284 tcg_temp_free_i64(t2
);
5285 tcg_gen_brcondi_i64(TCG_COND_GE
, t1
, 0, lab
);
5286 generate_exception(ctx
, EXCP_OVERFLOW
);
5292 tcg_gen_ext32u_i64(t0
, t0
);
5293 tcg_gen_ext32u_i64(t1
, t1
);
5294 tcg_gen_mul_i64(t0
, t0
, t1
);
5303 cond
= TCG_COND_LTU
;
5311 cond
= TCG_COND_LEU
;
5318 int cc
= (ctx
->opcode
>> 8) & 0x7;
5319 TCGv_i64 t64
= tcg_temp_new_i64();
5320 TCGv_i32 t32
= tcg_temp_new_i32();
5322 tcg_gen_setcond_i64(cond
, t64
, t0
, t1
);
5323 tcg_gen_extrl_i64_i32(t32
, t64
);
5324 tcg_gen_deposit_i32(fpu_fcr31
, fpu_fcr31
, t32
,
5327 tcg_temp_free_i32(t32
);
5328 tcg_temp_free_i64(t64
);
5333 MIPS_INVAL("loongson_cp2");
5334 gen_reserved_instruction(ctx
);
5338 gen_store_fpr64(ctx
, t0
, rd
);
5341 tcg_temp_free_i64(t0
);
5342 tcg_temp_free_i64(t1
);
5345 static void gen_loongson_lswc2(DisasContext
*ctx
, int rt
,
5350 #if defined(TARGET_MIPS64)
5351 int lsq_rt1
= ctx
->opcode
& 0x1f;
5352 int lsq_offset
= sextract32(ctx
->opcode
, 6, 9) << 4;
5354 int shf_offset
= sextract32(ctx
->opcode
, 6, 8);
5356 t0
= tcg_temp_new();
5358 switch (MASK_LOONGSON_GSLSQ(ctx
->opcode
)) {
5359 #if defined(TARGET_MIPS64)
5361 t1
= tcg_temp_new();
5362 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
);
5363 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
|
5364 ctx
->default_tcg_memop_mask
);
5365 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
+ 8);
5366 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
|
5367 ctx
->default_tcg_memop_mask
);
5368 gen_store_gpr(t1
, rt
);
5369 gen_store_gpr(t0
, lsq_rt1
);
5373 check_cp1_enabled(ctx
);
5374 t1
= tcg_temp_new();
5375 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
);
5376 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
|
5377 ctx
->default_tcg_memop_mask
);
5378 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
+ 8);
5379 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
|
5380 ctx
->default_tcg_memop_mask
);
5381 gen_store_fpr64(ctx
, t1
, rt
);
5382 gen_store_fpr64(ctx
, t0
, lsq_rt1
);
5386 t1
= tcg_temp_new();
5387 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
);
5388 gen_load_gpr(t1
, rt
);
5389 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
|
5390 ctx
->default_tcg_memop_mask
);
5391 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
+ 8);
5392 gen_load_gpr(t1
, lsq_rt1
);
5393 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
|
5394 ctx
->default_tcg_memop_mask
);
5398 check_cp1_enabled(ctx
);
5399 t1
= tcg_temp_new();
5400 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
);
5401 gen_load_fpr64(ctx
, t1
, rt
);
5402 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
|
5403 ctx
->default_tcg_memop_mask
);
5404 gen_base_offset_addr(ctx
, t0
, rs
, lsq_offset
+ 8);
5405 gen_load_fpr64(ctx
, t1
, lsq_rt1
);
5406 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
|
5407 ctx
->default_tcg_memop_mask
);
5412 switch (MASK_LOONGSON_GSSHFLS(ctx
->opcode
)) {
5414 check_cp1_enabled(ctx
);
5415 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
5416 t1
= tcg_temp_new();
5417 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
5418 tcg_gen_andi_tl(t1
, t0
, 3);
5419 #ifndef TARGET_WORDS_BIGENDIAN
5420 tcg_gen_xori_tl(t1
, t1
, 3);
5422 tcg_gen_shli_tl(t1
, t1
, 3);
5423 tcg_gen_andi_tl(t0
, t0
, ~3);
5424 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
5425 tcg_gen_shl_tl(t0
, t0
, t1
);
5426 t2
= tcg_const_tl(-1);
5427 tcg_gen_shl_tl(t2
, t2
, t1
);
5428 fp0
= tcg_temp_new_i32();
5429 gen_load_fpr32(ctx
, fp0
, rt
);
5430 tcg_gen_ext_i32_tl(t1
, fp0
);
5431 tcg_gen_andc_tl(t1
, t1
, t2
);
5433 tcg_gen_or_tl(t0
, t0
, t1
);
5435 #if defined(TARGET_MIPS64)
5436 tcg_gen_extrl_i64_i32(fp0
, t0
);
5438 tcg_gen_ext32s_tl(fp0
, t0
);
5440 gen_store_fpr32(ctx
, fp0
, rt
);
5441 tcg_temp_free_i32(fp0
);
5444 check_cp1_enabled(ctx
);
5445 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
5446 t1
= tcg_temp_new();
5447 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
5448 tcg_gen_andi_tl(t1
, t0
, 3);
5449 #ifdef TARGET_WORDS_BIGENDIAN
5450 tcg_gen_xori_tl(t1
, t1
, 3);
5452 tcg_gen_shli_tl(t1
, t1
, 3);
5453 tcg_gen_andi_tl(t0
, t0
, ~3);
5454 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEUL
);
5455 tcg_gen_shr_tl(t0
, t0
, t1
);
5456 tcg_gen_xori_tl(t1
, t1
, 31);
5457 t2
= tcg_const_tl(0xfffffffeull
);
5458 tcg_gen_shl_tl(t2
, t2
, t1
);
5459 fp0
= tcg_temp_new_i32();
5460 gen_load_fpr32(ctx
, fp0
, rt
);
5461 tcg_gen_ext_i32_tl(t1
, fp0
);
5462 tcg_gen_and_tl(t1
, t1
, t2
);
5464 tcg_gen_or_tl(t0
, t0
, t1
);
5466 #if defined(TARGET_MIPS64)
5467 tcg_gen_extrl_i64_i32(fp0
, t0
);
5469 tcg_gen_ext32s_tl(fp0
, t0
);
5471 gen_store_fpr32(ctx
, fp0
, rt
);
5472 tcg_temp_free_i32(fp0
);
5474 #if defined(TARGET_MIPS64)
5476 check_cp1_enabled(ctx
);
5477 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
5478 t1
= tcg_temp_new();
5479 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
5480 tcg_gen_andi_tl(t1
, t0
, 7);
5481 #ifndef TARGET_WORDS_BIGENDIAN
5482 tcg_gen_xori_tl(t1
, t1
, 7);
5484 tcg_gen_shli_tl(t1
, t1
, 3);
5485 tcg_gen_andi_tl(t0
, t0
, ~7);
5486 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
5487 tcg_gen_shl_tl(t0
, t0
, t1
);
5488 t2
= tcg_const_tl(-1);
5489 tcg_gen_shl_tl(t2
, t2
, t1
);
5490 gen_load_fpr64(ctx
, t1
, rt
);
5491 tcg_gen_andc_tl(t1
, t1
, t2
);
5493 tcg_gen_or_tl(t0
, t0
, t1
);
5495 gen_store_fpr64(ctx
, t0
, rt
);
5498 check_cp1_enabled(ctx
);
5499 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
5500 t1
= tcg_temp_new();
5501 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
5502 tcg_gen_andi_tl(t1
, t0
, 7);
5503 #ifdef TARGET_WORDS_BIGENDIAN
5504 tcg_gen_xori_tl(t1
, t1
, 7);
5506 tcg_gen_shli_tl(t1
, t1
, 3);
5507 tcg_gen_andi_tl(t0
, t0
, ~7);
5508 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
5509 tcg_gen_shr_tl(t0
, t0
, t1
);
5510 tcg_gen_xori_tl(t1
, t1
, 63);
5511 t2
= tcg_const_tl(0xfffffffffffffffeull
);
5512 tcg_gen_shl_tl(t2
, t2
, t1
);
5513 gen_load_fpr64(ctx
, t1
, rt
);
5514 tcg_gen_and_tl(t1
, t1
, t2
);
5516 tcg_gen_or_tl(t0
, t0
, t1
);
5518 gen_store_fpr64(ctx
, t0
, rt
);
5522 MIPS_INVAL("loongson_gsshfl");
5523 gen_reserved_instruction(ctx
);
5528 switch (MASK_LOONGSON_GSSHFLS(ctx
->opcode
)) {
5530 check_cp1_enabled(ctx
);
5531 t1
= tcg_temp_new();
5532 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
5533 fp0
= tcg_temp_new_i32();
5534 gen_load_fpr32(ctx
, fp0
, rt
);
5535 tcg_gen_ext_i32_tl(t1
, fp0
);
5536 gen_helper_0e2i(swl
, t1
, t0
, ctx
->mem_idx
);
5537 tcg_temp_free_i32(fp0
);
5541 check_cp1_enabled(ctx
);
5542 t1
= tcg_temp_new();
5543 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
5544 fp0
= tcg_temp_new_i32();
5545 gen_load_fpr32(ctx
, fp0
, rt
);
5546 tcg_gen_ext_i32_tl(t1
, fp0
);
5547 gen_helper_0e2i(swr
, t1
, t0
, ctx
->mem_idx
);
5548 tcg_temp_free_i32(fp0
);
5551 #if defined(TARGET_MIPS64)
5553 check_cp1_enabled(ctx
);
5554 t1
= tcg_temp_new();
5555 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
5556 gen_load_fpr64(ctx
, t1
, rt
);
5557 gen_helper_0e2i(sdl
, t1
, t0
, ctx
->mem_idx
);
5561 check_cp1_enabled(ctx
);
5562 t1
= tcg_temp_new();
5563 gen_base_offset_addr(ctx
, t0
, rs
, shf_offset
);
5564 gen_load_fpr64(ctx
, t1
, rt
);
5565 gen_helper_0e2i(sdr
, t1
, t0
, ctx
->mem_idx
);
5570 MIPS_INVAL("loongson_gsshfs");
5571 gen_reserved_instruction(ctx
);
5576 MIPS_INVAL("loongson_gslsq");
5577 gen_reserved_instruction(ctx
);
5583 /* Loongson EXT LDC2/SDC2 */
5584 static void gen_loongson_lsdc2(DisasContext
*ctx
, int rt
,
5587 int offset
= sextract32(ctx
->opcode
, 3, 8);
5588 uint32_t opc
= MASK_LOONGSON_LSDC2(ctx
->opcode
);
5592 /* Pre-conditions */
5598 /* prefetch, implement as NOP */
5609 #if defined(TARGET_MIPS64)
5612 check_cp1_enabled(ctx
);
5613 /* prefetch, implement as NOP */
5619 #if defined(TARGET_MIPS64)
5622 check_cp1_enabled(ctx
);
5625 MIPS_INVAL("loongson_lsdc2");
5626 gen_reserved_instruction(ctx
);
5631 t0
= tcg_temp_new();
5633 gen_base_offset_addr(ctx
, t0
, rs
, offset
);
5634 gen_op_addr_add(ctx
, t0
, cpu_gpr
[rd
], t0
);
5638 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_SB
);
5639 gen_store_gpr(t0
, rt
);
5642 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
5643 ctx
->default_tcg_memop_mask
);
5644 gen_store_gpr(t0
, rt
);
5647 gen_base_offset_addr(ctx
, t0
, rs
, offset
);
5649 gen_op_addr_add(ctx
, t0
, cpu_gpr
[rd
], t0
);
5651 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
|
5652 ctx
->default_tcg_memop_mask
);
5653 gen_store_gpr(t0
, rt
);
5655 #if defined(TARGET_MIPS64)
5657 gen_base_offset_addr(ctx
, t0
, rs
, offset
);
5659 gen_op_addr_add(ctx
, t0
, cpu_gpr
[rd
], t0
);
5661 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
|
5662 ctx
->default_tcg_memop_mask
);
5663 gen_store_gpr(t0
, rt
);
5667 check_cp1_enabled(ctx
);
5668 gen_base_offset_addr(ctx
, t0
, rs
, offset
);
5670 gen_op_addr_add(ctx
, t0
, cpu_gpr
[rd
], t0
);
5672 fp0
= tcg_temp_new_i32();
5673 tcg_gen_qemu_ld_i32(fp0
, t0
, ctx
->mem_idx
, MO_TESL
|
5674 ctx
->default_tcg_memop_mask
);
5675 gen_store_fpr32(ctx
, fp0
, rt
);
5676 tcg_temp_free_i32(fp0
);
5678 #if defined(TARGET_MIPS64)
5680 check_cp1_enabled(ctx
);
5681 gen_base_offset_addr(ctx
, t0
, rs
, offset
);
5683 gen_op_addr_add(ctx
, t0
, cpu_gpr
[rd
], t0
);
5685 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
|
5686 ctx
->default_tcg_memop_mask
);
5687 gen_store_fpr64(ctx
, t0
, rt
);
5691 t1
= tcg_temp_new();
5692 gen_load_gpr(t1
, rt
);
5693 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_SB
);
5697 t1
= tcg_temp_new();
5698 gen_load_gpr(t1
, rt
);
5699 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
5700 ctx
->default_tcg_memop_mask
);
5704 t1
= tcg_temp_new();
5705 gen_load_gpr(t1
, rt
);
5706 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
|
5707 ctx
->default_tcg_memop_mask
);
5710 #if defined(TARGET_MIPS64)
5712 t1
= tcg_temp_new();
5713 gen_load_gpr(t1
, rt
);
5714 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
|
5715 ctx
->default_tcg_memop_mask
);
5720 fp0
= tcg_temp_new_i32();
5721 gen_load_fpr32(ctx
, fp0
, rt
);
5722 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
|
5723 ctx
->default_tcg_memop_mask
);
5724 tcg_temp_free_i32(fp0
);
5726 #if defined(TARGET_MIPS64)
5728 t1
= tcg_temp_new();
5729 gen_load_fpr64(ctx
, t1
, rt
);
5730 tcg_gen_qemu_st_i64(t1
, t0
, ctx
->mem_idx
, MO_TEQ
|
5731 ctx
->default_tcg_memop_mask
);
5743 static void gen_trap(DisasContext
*ctx
, uint32_t opc
,
5744 int rs
, int rt
, int16_t imm
)
5747 TCGv t0
= tcg_temp_new();
5748 TCGv t1
= tcg_temp_new();
5751 /* Load needed operands */
5759 /* Compare two registers */
5761 gen_load_gpr(t0
, rs
);
5762 gen_load_gpr(t1
, rt
);
5772 /* Compare register to immediate */
5773 if (rs
!= 0 || imm
!= 0) {
5774 gen_load_gpr(t0
, rs
);
5775 tcg_gen_movi_tl(t1
, (int32_t)imm
);
5782 case OPC_TEQ
: /* rs == rs */
5783 case OPC_TEQI
: /* r0 == 0 */
5784 case OPC_TGE
: /* rs >= rs */
5785 case OPC_TGEI
: /* r0 >= 0 */
5786 case OPC_TGEU
: /* rs >= rs unsigned */
5787 case OPC_TGEIU
: /* r0 >= 0 unsigned */
5789 generate_exception_end(ctx
, EXCP_TRAP
);
5791 case OPC_TLT
: /* rs < rs */
5792 case OPC_TLTI
: /* r0 < 0 */
5793 case OPC_TLTU
: /* rs < rs unsigned */
5794 case OPC_TLTIU
: /* r0 < 0 unsigned */
5795 case OPC_TNE
: /* rs != rs */
5796 case OPC_TNEI
: /* r0 != 0 */
5797 /* Never trap: treat as NOP. */
5801 TCGLabel
*l1
= gen_new_label();
5806 tcg_gen_brcond_tl(TCG_COND_NE
, t0
, t1
, l1
);
5810 tcg_gen_brcond_tl(TCG_COND_LT
, t0
, t1
, l1
);
5814 tcg_gen_brcond_tl(TCG_COND_LTU
, t0
, t1
, l1
);
5818 tcg_gen_brcond_tl(TCG_COND_GE
, t0
, t1
, l1
);
5822 tcg_gen_brcond_tl(TCG_COND_GEU
, t0
, t1
, l1
);
5826 tcg_gen_brcond_tl(TCG_COND_EQ
, t0
, t1
, l1
);
5829 generate_exception(ctx
, EXCP_TRAP
);
5836 static inline bool use_goto_tb(DisasContext
*ctx
, target_ulong dest
)
5838 if (unlikely(ctx
->base
.singlestep_enabled
)) {
5842 #ifndef CONFIG_USER_ONLY
5843 return (ctx
->base
.tb
->pc
& TARGET_PAGE_MASK
) == (dest
& TARGET_PAGE_MASK
);
5849 static inline void gen_goto_tb(DisasContext
*ctx
, int n
, target_ulong dest
)
5851 if (use_goto_tb(ctx
, dest
)) {
5854 tcg_gen_exit_tb(ctx
->base
.tb
, n
);
5857 if (ctx
->base
.singlestep_enabled
) {
5858 save_cpu_state(ctx
, 0);
5859 gen_helper_raise_exception_debug(cpu_env
);
5861 tcg_gen_lookup_and_goto_ptr();
5865 /* Branches (before delay slot) */
5866 static void gen_compute_branch(DisasContext
*ctx
, uint32_t opc
,
5868 int rs
, int rt
, int32_t offset
,
5871 target_ulong btgt
= -1;
5873 int bcond_compute
= 0;
5874 TCGv t0
= tcg_temp_new();
5875 TCGv t1
= tcg_temp_new();
5877 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
5878 #ifdef MIPS_DEBUG_DISAS
5879 LOG_DISAS("Branch in delay / forbidden slot at PC 0x"
5880 TARGET_FMT_lx
"\n", ctx
->base
.pc_next
);
5882 gen_reserved_instruction(ctx
);
5886 /* Load needed operands */
5892 /* Compare two registers */
5894 gen_load_gpr(t0
, rs
);
5895 gen_load_gpr(t1
, rt
);
5898 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5912 /* Compare to zero */
5914 gen_load_gpr(t0
, rs
);
5917 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5920 #if defined(TARGET_MIPS64)
5922 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x7F);
5924 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
5927 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
5932 /* Jump to immediate */
5933 btgt
= ((ctx
->base
.pc_next
+ insn_bytes
) & (int32_t)0xF0000000) |
5938 /* Jump to register */
5939 if (offset
!= 0 && offset
!= 16) {
5941 * Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
5942 * others are reserved.
5944 MIPS_INVAL("jump hint");
5945 gen_reserved_instruction(ctx
);
5948 gen_load_gpr(btarget
, rs
);
5951 MIPS_INVAL("branch/jump");
5952 gen_reserved_instruction(ctx
);
5955 if (bcond_compute
== 0) {
5956 /* No condition to be computed */
5958 case OPC_BEQ
: /* rx == rx */
5959 case OPC_BEQL
: /* rx == rx likely */
5960 case OPC_BGEZ
: /* 0 >= 0 */
5961 case OPC_BGEZL
: /* 0 >= 0 likely */
5962 case OPC_BLEZ
: /* 0 <= 0 */
5963 case OPC_BLEZL
: /* 0 <= 0 likely */
5965 ctx
->hflags
|= MIPS_HFLAG_B
;
5967 case OPC_BGEZAL
: /* 0 >= 0 */
5968 case OPC_BGEZALL
: /* 0 >= 0 likely */
5969 /* Always take and link */
5971 ctx
->hflags
|= MIPS_HFLAG_B
;
5973 case OPC_BNE
: /* rx != rx */
5974 case OPC_BGTZ
: /* 0 > 0 */
5975 case OPC_BLTZ
: /* 0 < 0 */
5978 case OPC_BLTZAL
: /* 0 < 0 */
5980 * Handle as an unconditional branch to get correct delay
5984 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ delayslot_size
;
5985 ctx
->hflags
|= MIPS_HFLAG_B
;
5987 case OPC_BLTZALL
: /* 0 < 0 likely */
5988 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
5989 /* Skip the instruction in the delay slot */
5990 ctx
->base
.pc_next
+= 4;
5992 case OPC_BNEL
: /* rx != rx likely */
5993 case OPC_BGTZL
: /* 0 > 0 likely */
5994 case OPC_BLTZL
: /* 0 < 0 likely */
5995 /* Skip the instruction in the delay slot */
5996 ctx
->base
.pc_next
+= 4;
5999 ctx
->hflags
|= MIPS_HFLAG_B
;
6002 ctx
->hflags
|= MIPS_HFLAG_BX
;
6006 ctx
->hflags
|= MIPS_HFLAG_B
;
6009 ctx
->hflags
|= MIPS_HFLAG_BR
;
6013 ctx
->hflags
|= MIPS_HFLAG_BR
;
6016 MIPS_INVAL("branch/jump");
6017 gen_reserved_instruction(ctx
);
6023 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
6026 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
6029 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
6032 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
6035 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6038 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6041 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6045 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6049 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
6052 tcg_gen_setcondi_tl(TCG_COND_GT
, bcond
, t0
, 0);
6055 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
6058 tcg_gen_setcondi_tl(TCG_COND_LE
, bcond
, t0
, 0);
6061 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6064 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6067 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
6069 #if defined(TARGET_MIPS64)
6071 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 64);
6075 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6078 ctx
->hflags
|= MIPS_HFLAG_BC
;
6081 tcg_gen_setcondi_tl(TCG_COND_LT
, bcond
, t0
, 0);
6084 ctx
->hflags
|= MIPS_HFLAG_BL
;
6087 MIPS_INVAL("conditional branch/jump");
6088 gen_reserved_instruction(ctx
);
6093 ctx
->btarget
= btgt
;
6095 switch (delayslot_size
) {
6097 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
6100 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
6105 int post_delay
= insn_bytes
+ delayslot_size
;
6106 int lowbit
= !!(ctx
->hflags
& MIPS_HFLAG_M16
);
6108 tcg_gen_movi_tl(cpu_gpr
[blink
],
6109 ctx
->base
.pc_next
+ post_delay
+ lowbit
);
6113 if (insn_bytes
== 2) {
6114 ctx
->hflags
|= MIPS_HFLAG_B16
;
6121 /* nanoMIPS Branches */
6122 static void gen_compute_branch_nm(DisasContext
*ctx
, uint32_t opc
,
6124 int rs
, int rt
, int32_t offset
)
6126 target_ulong btgt
= -1;
6127 int bcond_compute
= 0;
6128 TCGv t0
= tcg_temp_new();
6129 TCGv t1
= tcg_temp_new();
6131 /* Load needed operands */
6135 /* Compare two registers */
6137 gen_load_gpr(t0
, rs
);
6138 gen_load_gpr(t1
, rt
);
6141 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
6144 /* Compare to zero */
6146 gen_load_gpr(t0
, rs
);
6149 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
6152 tcg_gen_andi_tl(t0
, cpu_dspctrl
, 0x3F);
6154 btgt
= ctx
->base
.pc_next
+ insn_bytes
+ offset
;
6158 /* Jump to register */
6159 if (offset
!= 0 && offset
!= 16) {
6161 * Hint = 0 is JR/JALR, hint 16 is JR.HB/JALR.HB, the
6162 * others are reserved.
6164 MIPS_INVAL("jump hint");
6165 gen_reserved_instruction(ctx
);
6168 gen_load_gpr(btarget
, rs
);
6171 MIPS_INVAL("branch/jump");
6172 gen_reserved_instruction(ctx
);
6175 if (bcond_compute
== 0) {
6176 /* No condition to be computed */
6178 case OPC_BEQ
: /* rx == rx */
6180 ctx
->hflags
|= MIPS_HFLAG_B
;
6182 case OPC_BGEZAL
: /* 0 >= 0 */
6183 /* Always take and link */
6184 tcg_gen_movi_tl(cpu_gpr
[31],
6185 ctx
->base
.pc_next
+ insn_bytes
);
6186 ctx
->hflags
|= MIPS_HFLAG_B
;
6188 case OPC_BNE
: /* rx != rx */
6189 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 8);
6190 /* Skip the instruction in the delay slot */
6191 ctx
->base
.pc_next
+= 4;
6194 ctx
->hflags
|= MIPS_HFLAG_BR
;
6198 tcg_gen_movi_tl(cpu_gpr
[rt
],
6199 ctx
->base
.pc_next
+ insn_bytes
);
6201 ctx
->hflags
|= MIPS_HFLAG_BR
;
6204 MIPS_INVAL("branch/jump");
6205 gen_reserved_instruction(ctx
);
6211 tcg_gen_setcond_tl(TCG_COND_EQ
, bcond
, t0
, t1
);
6214 tcg_gen_setcond_tl(TCG_COND_NE
, bcond
, t0
, t1
);
6217 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 0);
6218 tcg_gen_movi_tl(cpu_gpr
[31],
6219 ctx
->base
.pc_next
+ insn_bytes
);
6222 tcg_gen_setcondi_tl(TCG_COND_GE
, bcond
, t0
, 32);
6224 ctx
->hflags
|= MIPS_HFLAG_BC
;
6227 MIPS_INVAL("conditional branch/jump");
6228 gen_reserved_instruction(ctx
);
6233 ctx
->btarget
= btgt
;
6236 if (insn_bytes
== 2) {
6237 ctx
->hflags
|= MIPS_HFLAG_B16
;
6244 /* special3 bitfield operations */
6245 static void gen_bitops(DisasContext
*ctx
, uint32_t opc
, int rt
,
6246 int rs
, int lsb
, int msb
)
6248 TCGv t0
= tcg_temp_new();
6249 TCGv t1
= tcg_temp_new();
6251 gen_load_gpr(t1
, rs
);
6254 if (lsb
+ msb
> 31) {
6258 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
6261 * The two checks together imply that lsb == 0,
6262 * so this is a simple sign-extension.
6264 tcg_gen_ext32s_tl(t0
, t1
);
6267 #if defined(TARGET_MIPS64)
6276 if (lsb
+ msb
> 63) {
6279 tcg_gen_extract_tl(t0
, t1
, lsb
, msb
+ 1);
6286 gen_load_gpr(t0
, rt
);
6287 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
6288 tcg_gen_ext32s_tl(t0
, t0
);
6290 #if defined(TARGET_MIPS64)
6301 gen_load_gpr(t0
, rt
);
6302 tcg_gen_deposit_tl(t0
, t0
, t1
, lsb
, msb
- lsb
+ 1);
6307 MIPS_INVAL("bitops");
6308 gen_reserved_instruction(ctx
);
6313 gen_store_gpr(t0
, rt
);
6318 static void gen_bshfl(DisasContext
*ctx
, uint32_t op2
, int rt
, int rd
)
6323 /* If no destination, treat it as a NOP. */
6327 t0
= tcg_temp_new();
6328 gen_load_gpr(t0
, rt
);
6332 TCGv t1
= tcg_temp_new();
6333 TCGv t2
= tcg_const_tl(0x00FF00FF);
6335 tcg_gen_shri_tl(t1
, t0
, 8);
6336 tcg_gen_and_tl(t1
, t1
, t2
);
6337 tcg_gen_and_tl(t0
, t0
, t2
);
6338 tcg_gen_shli_tl(t0
, t0
, 8);
6339 tcg_gen_or_tl(t0
, t0
, t1
);
6342 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
6346 tcg_gen_ext8s_tl(cpu_gpr
[rd
], t0
);
6349 tcg_gen_ext16s_tl(cpu_gpr
[rd
], t0
);
6351 #if defined(TARGET_MIPS64)
6354 TCGv t1
= tcg_temp_new();
6355 TCGv t2
= tcg_const_tl(0x00FF00FF00FF00FFULL
);
6357 tcg_gen_shri_tl(t1
, t0
, 8);
6358 tcg_gen_and_tl(t1
, t1
, t2
);
6359 tcg_gen_and_tl(t0
, t0
, t2
);
6360 tcg_gen_shli_tl(t0
, t0
, 8);
6361 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
6368 TCGv t1
= tcg_temp_new();
6369 TCGv t2
= tcg_const_tl(0x0000FFFF0000FFFFULL
);
6371 tcg_gen_shri_tl(t1
, t0
, 16);
6372 tcg_gen_and_tl(t1
, t1
, t2
);
6373 tcg_gen_and_tl(t0
, t0
, t2
);
6374 tcg_gen_shli_tl(t0
, t0
, 16);
6375 tcg_gen_or_tl(t0
, t0
, t1
);
6376 tcg_gen_shri_tl(t1
, t0
, 32);
6377 tcg_gen_shli_tl(t0
, t0
, 32);
6378 tcg_gen_or_tl(cpu_gpr
[rd
], t0
, t1
);
6385 MIPS_INVAL("bsfhl");
6386 gen_reserved_instruction(ctx
);
6393 static void gen_align_bits(DisasContext
*ctx
, int wordsz
, int rd
, int rs
,
6401 t0
= tcg_temp_new();
6402 if (bits
== 0 || bits
== wordsz
) {
6404 gen_load_gpr(t0
, rt
);
6406 gen_load_gpr(t0
, rs
);
6410 tcg_gen_ext32s_tl(cpu_gpr
[rd
], t0
);
6412 #if defined(TARGET_MIPS64)
6414 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
6419 TCGv t1
= tcg_temp_new();
6420 gen_load_gpr(t0
, rt
);
6421 gen_load_gpr(t1
, rs
);
6425 TCGv_i64 t2
= tcg_temp_new_i64();
6426 tcg_gen_concat_tl_i64(t2
, t1
, t0
);
6427 tcg_gen_shri_i64(t2
, t2
, 32 - bits
);
6428 gen_move_low32(cpu_gpr
[rd
], t2
);
6429 tcg_temp_free_i64(t2
);
6432 #if defined(TARGET_MIPS64)
6434 tcg_gen_shli_tl(t0
, t0
, bits
);
6435 tcg_gen_shri_tl(t1
, t1
, 64 - bits
);
6436 tcg_gen_or_tl(cpu_gpr
[rd
], t1
, t0
);
6446 static void gen_align(DisasContext
*ctx
, int wordsz
, int rd
, int rs
, int rt
,
6449 gen_align_bits(ctx
, wordsz
, rd
, rs
, rt
, bp
* 8);
6452 static void gen_ext(DisasContext
*ctx
, int wordsz
, int rd
, int rs
, int rt
,
6455 gen_align_bits(ctx
, wordsz
, rd
, rs
, rt
, wordsz
- shift
);
6458 static void gen_bitswap(DisasContext
*ctx
, int opc
, int rd
, int rt
)
6465 t0
= tcg_temp_new();
6466 gen_load_gpr(t0
, rt
);
6469 gen_helper_bitswap(cpu_gpr
[rd
], t0
);
6471 #if defined(TARGET_MIPS64)
6473 gen_helper_dbitswap(cpu_gpr
[rd
], t0
);
6480 #ifndef CONFIG_USER_ONLY
6481 /* CP0 (MMU and control) */
6482 static inline void gen_mthc0_entrylo(TCGv arg
, target_ulong off
)
6484 TCGv_i64 t0
= tcg_temp_new_i64();
6485 TCGv_i64 t1
= tcg_temp_new_i64();
6487 tcg_gen_ext_tl_i64(t0
, arg
);
6488 tcg_gen_ld_i64(t1
, cpu_env
, off
);
6489 #if defined(TARGET_MIPS64)
6490 tcg_gen_deposit_i64(t1
, t1
, t0
, 30, 32);
6492 tcg_gen_concat32_i64(t1
, t1
, t0
);
6494 tcg_gen_st_i64(t1
, cpu_env
, off
);
6495 tcg_temp_free_i64(t1
);
6496 tcg_temp_free_i64(t0
);
6499 static inline void gen_mthc0_store64(TCGv arg
, target_ulong off
)
6501 TCGv_i64 t0
= tcg_temp_new_i64();
6502 TCGv_i64 t1
= tcg_temp_new_i64();
6504 tcg_gen_ext_tl_i64(t0
, arg
);
6505 tcg_gen_ld_i64(t1
, cpu_env
, off
);
6506 tcg_gen_concat32_i64(t1
, t1
, t0
);
6507 tcg_gen_st_i64(t1
, cpu_env
, off
);
6508 tcg_temp_free_i64(t1
);
6509 tcg_temp_free_i64(t0
);
6512 static inline void gen_mfhc0_entrylo(TCGv arg
, target_ulong off
)
6514 TCGv_i64 t0
= tcg_temp_new_i64();
6516 tcg_gen_ld_i64(t0
, cpu_env
, off
);
6517 #if defined(TARGET_MIPS64)
6518 tcg_gen_shri_i64(t0
, t0
, 30);
6520 tcg_gen_shri_i64(t0
, t0
, 32);
6522 gen_move_low32(arg
, t0
);
6523 tcg_temp_free_i64(t0
);
6526 static inline void gen_mfhc0_load64(TCGv arg
, target_ulong off
, int shift
)
6528 TCGv_i64 t0
= tcg_temp_new_i64();
6530 tcg_gen_ld_i64(t0
, cpu_env
, off
);
6531 tcg_gen_shri_i64(t0
, t0
, 32 + shift
);
6532 gen_move_low32(arg
, t0
);
6533 tcg_temp_free_i64(t0
);
6536 static inline void gen_mfc0_load32(TCGv arg
, target_ulong off
)
6538 TCGv_i32 t0
= tcg_temp_new_i32();
6540 tcg_gen_ld_i32(t0
, cpu_env
, off
);
6541 tcg_gen_ext_i32_tl(arg
, t0
);
6542 tcg_temp_free_i32(t0
);
6545 static inline void gen_mfc0_load64(TCGv arg
, target_ulong off
)
6547 tcg_gen_ld_tl(arg
, cpu_env
, off
);
6548 tcg_gen_ext32s_tl(arg
, arg
);
6551 static inline void gen_mtc0_store32(TCGv arg
, target_ulong off
)
6553 TCGv_i32 t0
= tcg_temp_new_i32();
6555 tcg_gen_trunc_tl_i32(t0
, arg
);
6556 tcg_gen_st_i32(t0
, cpu_env
, off
);
6557 tcg_temp_free_i32(t0
);
6560 #define CP0_CHECK(c) \
6563 goto cp0_unimplemented; \
6567 static void gen_mfhc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6569 const char *register_name
= "invalid";
6572 case CP0_REGISTER_02
:
6575 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6576 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6577 register_name
= "EntryLo0";
6580 goto cp0_unimplemented
;
6583 case CP0_REGISTER_03
:
6585 case CP0_REG03__ENTRYLO1
:
6586 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6587 gen_mfhc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6588 register_name
= "EntryLo1";
6591 goto cp0_unimplemented
;
6594 case CP0_REGISTER_09
:
6596 case CP0_REG09__SAAR
:
6597 CP0_CHECK(ctx
->saar
);
6598 gen_helper_mfhc0_saar(arg
, cpu_env
);
6599 register_name
= "SAAR";
6602 goto cp0_unimplemented
;
6605 case CP0_REGISTER_17
:
6607 case CP0_REG17__LLADDR
:
6608 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_LLAddr
),
6609 ctx
->CP0_LLAddr_shift
);
6610 register_name
= "LLAddr";
6612 case CP0_REG17__MAAR
:
6613 CP0_CHECK(ctx
->mrp
);
6614 gen_helper_mfhc0_maar(arg
, cpu_env
);
6615 register_name
= "MAAR";
6618 goto cp0_unimplemented
;
6621 case CP0_REGISTER_19
:
6623 case CP0_REG19__WATCHHI0
:
6624 case CP0_REG19__WATCHHI1
:
6625 case CP0_REG19__WATCHHI2
:
6626 case CP0_REG19__WATCHHI3
:
6627 case CP0_REG19__WATCHHI4
:
6628 case CP0_REG19__WATCHHI5
:
6629 case CP0_REG19__WATCHHI6
:
6630 case CP0_REG19__WATCHHI7
:
6631 /* upper 32 bits are only available when Config5MI != 0 */
6633 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_WatchHi
[sel
]), 0);
6634 register_name
= "WatchHi";
6637 goto cp0_unimplemented
;
6640 case CP0_REGISTER_28
:
6646 gen_mfhc0_load64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
), 0);
6647 register_name
= "TagLo";
6650 goto cp0_unimplemented
;
6654 goto cp0_unimplemented
;
6656 trace_mips_translate_c0("mfhc0", register_name
, reg
, sel
);
6660 qemu_log_mask(LOG_UNIMP
, "mfhc0 %s (reg %d sel %d)\n",
6661 register_name
, reg
, sel
);
6662 tcg_gen_movi_tl(arg
, 0);
6665 static void gen_mthc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6667 const char *register_name
= "invalid";
6668 uint64_t mask
= ctx
->PAMask
>> 36;
6671 case CP0_REGISTER_02
:
6674 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6675 tcg_gen_andi_tl(arg
, arg
, mask
);
6676 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo0
));
6677 register_name
= "EntryLo0";
6680 goto cp0_unimplemented
;
6683 case CP0_REGISTER_03
:
6685 case CP0_REG03__ENTRYLO1
:
6686 CP0_CHECK(ctx
->hflags
& MIPS_HFLAG_ELPA
);
6687 tcg_gen_andi_tl(arg
, arg
, mask
);
6688 gen_mthc0_entrylo(arg
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
6689 register_name
= "EntryLo1";
6692 goto cp0_unimplemented
;
6695 case CP0_REGISTER_09
:
6697 case CP0_REG09__SAAR
:
6698 CP0_CHECK(ctx
->saar
);
6699 gen_helper_mthc0_saar(cpu_env
, arg
);
6700 register_name
= "SAAR";
6703 goto cp0_unimplemented
;
6706 case CP0_REGISTER_17
:
6708 case CP0_REG17__LLADDR
:
6710 * LLAddr is read-only (the only exception is bit 0 if LLB is
6711 * supported); the CP0_LLAddr_rw_bitmask does not seem to be
6712 * relevant for modern MIPS cores supporting MTHC0, therefore
6713 * treating MTHC0 to LLAddr as NOP.
6715 register_name
= "LLAddr";
6717 case CP0_REG17__MAAR
:
6718 CP0_CHECK(ctx
->mrp
);
6719 gen_helper_mthc0_maar(cpu_env
, arg
);
6720 register_name
= "MAAR";
6723 goto cp0_unimplemented
;
6726 case CP0_REGISTER_19
:
6728 case CP0_REG19__WATCHHI0
:
6729 case CP0_REG19__WATCHHI1
:
6730 case CP0_REG19__WATCHHI2
:
6731 case CP0_REG19__WATCHHI3
:
6732 case CP0_REG19__WATCHHI4
:
6733 case CP0_REG19__WATCHHI5
:
6734 case CP0_REG19__WATCHHI6
:
6735 case CP0_REG19__WATCHHI7
:
6736 /* upper 32 bits are only available when Config5MI != 0 */
6738 gen_helper_0e1i(mthc0_watchhi
, arg
, sel
);
6739 register_name
= "WatchHi";
6742 goto cp0_unimplemented
;
6745 case CP0_REGISTER_28
:
6751 tcg_gen_andi_tl(arg
, arg
, mask
);
6752 gen_mthc0_store64(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
6753 register_name
= "TagLo";
6756 goto cp0_unimplemented
;
6760 goto cp0_unimplemented
;
6762 trace_mips_translate_c0("mthc0", register_name
, reg
, sel
);
6765 qemu_log_mask(LOG_UNIMP
, "mthc0 %s (reg %d sel %d)\n",
6766 register_name
, reg
, sel
);
6769 static inline void gen_mfc0_unimplemented(DisasContext
*ctx
, TCGv arg
)
6771 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
6772 tcg_gen_movi_tl(arg
, 0);
6774 tcg_gen_movi_tl(arg
, ~0);
6778 static void gen_mfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
6780 const char *register_name
= "invalid";
6783 check_insn(ctx
, ISA_MIPS_R1
);
6787 case CP0_REGISTER_00
:
6789 case CP0_REG00__INDEX
:
6790 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
6791 register_name
= "Index";
6793 case CP0_REG00__MVPCONTROL
:
6794 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6795 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
6796 register_name
= "MVPControl";
6798 case CP0_REG00__MVPCONF0
:
6799 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6800 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
6801 register_name
= "MVPConf0";
6803 case CP0_REG00__MVPCONF1
:
6804 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6805 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
6806 register_name
= "MVPConf1";
6808 case CP0_REG00__VPCONTROL
:
6810 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
6811 register_name
= "VPControl";
6814 goto cp0_unimplemented
;
6817 case CP0_REGISTER_01
:
6819 case CP0_REG01__RANDOM
:
6820 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
6821 gen_helper_mfc0_random(arg
, cpu_env
);
6822 register_name
= "Random";
6824 case CP0_REG01__VPECONTROL
:
6825 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6826 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
6827 register_name
= "VPEControl";
6829 case CP0_REG01__VPECONF0
:
6830 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6831 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
6832 register_name
= "VPEConf0";
6834 case CP0_REG01__VPECONF1
:
6835 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6836 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
6837 register_name
= "VPEConf1";
6839 case CP0_REG01__YQMASK
:
6840 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6841 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_YQMask
));
6842 register_name
= "YQMask";
6844 case CP0_REG01__VPESCHEDULE
:
6845 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6846 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPESchedule
));
6847 register_name
= "VPESchedule";
6849 case CP0_REG01__VPESCHEFBACK
:
6850 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6851 gen_mfc0_load64(arg
, offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
6852 register_name
= "VPEScheFBack";
6854 case CP0_REG01__VPEOPT
:
6855 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6856 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
6857 register_name
= "VPEOpt";
6860 goto cp0_unimplemented
;
6863 case CP0_REGISTER_02
:
6865 case CP0_REG02__ENTRYLO0
:
6867 TCGv_i64 tmp
= tcg_temp_new_i64();
6868 tcg_gen_ld_i64(tmp
, cpu_env
,
6869 offsetof(CPUMIPSState
, CP0_EntryLo0
));
6870 #if defined(TARGET_MIPS64)
6872 /* Move RI/XI fields to bits 31:30 */
6873 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6874 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6877 gen_move_low32(arg
, tmp
);
6878 tcg_temp_free_i64(tmp
);
6880 register_name
= "EntryLo0";
6882 case CP0_REG02__TCSTATUS
:
6883 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6884 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
6885 register_name
= "TCStatus";
6887 case CP0_REG02__TCBIND
:
6888 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6889 gen_helper_mfc0_tcbind(arg
, cpu_env
);
6890 register_name
= "TCBind";
6892 case CP0_REG02__TCRESTART
:
6893 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6894 gen_helper_mfc0_tcrestart(arg
, cpu_env
);
6895 register_name
= "TCRestart";
6897 case CP0_REG02__TCHALT
:
6898 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6899 gen_helper_mfc0_tchalt(arg
, cpu_env
);
6900 register_name
= "TCHalt";
6902 case CP0_REG02__TCCONTEXT
:
6903 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6904 gen_helper_mfc0_tccontext(arg
, cpu_env
);
6905 register_name
= "TCContext";
6907 case CP0_REG02__TCSCHEDULE
:
6908 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6909 gen_helper_mfc0_tcschedule(arg
, cpu_env
);
6910 register_name
= "TCSchedule";
6912 case CP0_REG02__TCSCHEFBACK
:
6913 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
6914 gen_helper_mfc0_tcschefback(arg
, cpu_env
);
6915 register_name
= "TCScheFBack";
6918 goto cp0_unimplemented
;
6921 case CP0_REGISTER_03
:
6923 case CP0_REG03__ENTRYLO1
:
6925 TCGv_i64 tmp
= tcg_temp_new_i64();
6926 tcg_gen_ld_i64(tmp
, cpu_env
,
6927 offsetof(CPUMIPSState
, CP0_EntryLo1
));
6928 #if defined(TARGET_MIPS64)
6930 /* Move RI/XI fields to bits 31:30 */
6931 tcg_gen_shri_tl(arg
, tmp
, CP0EnLo_XI
);
6932 tcg_gen_deposit_tl(tmp
, tmp
, arg
, 30, 2);
6935 gen_move_low32(arg
, tmp
);
6936 tcg_temp_free_i64(tmp
);
6938 register_name
= "EntryLo1";
6940 case CP0_REG03__GLOBALNUM
:
6942 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
6943 register_name
= "GlobalNumber";
6946 goto cp0_unimplemented
;
6949 case CP0_REGISTER_04
:
6951 case CP0_REG04__CONTEXT
:
6952 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
6953 tcg_gen_ext32s_tl(arg
, arg
);
6954 register_name
= "Context";
6956 case CP0_REG04__CONTEXTCONFIG
:
6958 /* gen_helper_mfc0_contextconfig(arg); */
6959 register_name
= "ContextConfig";
6960 goto cp0_unimplemented
;
6961 case CP0_REG04__USERLOCAL
:
6962 CP0_CHECK(ctx
->ulri
);
6963 tcg_gen_ld_tl(arg
, cpu_env
,
6964 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
6965 tcg_gen_ext32s_tl(arg
, arg
);
6966 register_name
= "UserLocal";
6968 case CP0_REG04__MMID
:
6970 gen_helper_mtc0_memorymapid(cpu_env
, arg
);
6971 register_name
= "MMID";
6974 goto cp0_unimplemented
;
6977 case CP0_REGISTER_05
:
6979 case CP0_REG05__PAGEMASK
:
6980 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
6981 register_name
= "PageMask";
6983 case CP0_REG05__PAGEGRAIN
:
6984 check_insn(ctx
, ISA_MIPS_R2
);
6985 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
6986 register_name
= "PageGrain";
6988 case CP0_REG05__SEGCTL0
:
6990 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
6991 tcg_gen_ext32s_tl(arg
, arg
);
6992 register_name
= "SegCtl0";
6994 case CP0_REG05__SEGCTL1
:
6996 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
6997 tcg_gen_ext32s_tl(arg
, arg
);
6998 register_name
= "SegCtl1";
7000 case CP0_REG05__SEGCTL2
:
7002 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
7003 tcg_gen_ext32s_tl(arg
, arg
);
7004 register_name
= "SegCtl2";
7006 case CP0_REG05__PWBASE
:
7008 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
7009 register_name
= "PWBase";
7011 case CP0_REG05__PWFIELD
:
7013 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWField
));
7014 register_name
= "PWField";
7016 case CP0_REG05__PWSIZE
:
7018 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWSize
));
7019 register_name
= "PWSize";
7022 goto cp0_unimplemented
;
7025 case CP0_REGISTER_06
:
7027 case CP0_REG06__WIRED
:
7028 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
7029 register_name
= "Wired";
7031 case CP0_REG06__SRSCONF0
:
7032 check_insn(ctx
, ISA_MIPS_R2
);
7033 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
7034 register_name
= "SRSConf0";
7036 case CP0_REG06__SRSCONF1
:
7037 check_insn(ctx
, ISA_MIPS_R2
);
7038 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
7039 register_name
= "SRSConf1";
7041 case CP0_REG06__SRSCONF2
:
7042 check_insn(ctx
, ISA_MIPS_R2
);
7043 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
7044 register_name
= "SRSConf2";
7046 case CP0_REG06__SRSCONF3
:
7047 check_insn(ctx
, ISA_MIPS_R2
);
7048 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
7049 register_name
= "SRSConf3";
7051 case CP0_REG06__SRSCONF4
:
7052 check_insn(ctx
, ISA_MIPS_R2
);
7053 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
7054 register_name
= "SRSConf4";
7056 case CP0_REG06__PWCTL
:
7058 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
7059 register_name
= "PWCtl";
7062 goto cp0_unimplemented
;
7065 case CP0_REGISTER_07
:
7067 case CP0_REG07__HWRENA
:
7068 check_insn(ctx
, ISA_MIPS_R2
);
7069 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
7070 register_name
= "HWREna";
7073 goto cp0_unimplemented
;
7076 case CP0_REGISTER_08
:
7078 case CP0_REG08__BADVADDR
:
7079 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
7080 tcg_gen_ext32s_tl(arg
, arg
);
7081 register_name
= "BadVAddr";
7083 case CP0_REG08__BADINSTR
:
7085 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
7086 register_name
= "BadInstr";
7088 case CP0_REG08__BADINSTRP
:
7090 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
7091 register_name
= "BadInstrP";
7093 case CP0_REG08__BADINSTRX
:
7095 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
7096 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
7097 register_name
= "BadInstrX";
7100 goto cp0_unimplemented
;
7103 case CP0_REGISTER_09
:
7105 case CP0_REG09__COUNT
:
7106 /* Mark as an IO operation because we read the time. */
7107 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7110 gen_helper_mfc0_count(arg
, cpu_env
);
7112 * Break the TB to be able to take timer interrupts immediately
7113 * after reading count. DISAS_STOP isn't sufficient, we need to
7114 * ensure we break completely out of translated code.
7116 gen_save_pc(ctx
->base
.pc_next
+ 4);
7117 ctx
->base
.is_jmp
= DISAS_EXIT
;
7118 register_name
= "Count";
7120 case CP0_REG09__SAARI
:
7121 CP0_CHECK(ctx
->saar
);
7122 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SAARI
));
7123 register_name
= "SAARI";
7125 case CP0_REG09__SAAR
:
7126 CP0_CHECK(ctx
->saar
);
7127 gen_helper_mfc0_saar(arg
, cpu_env
);
7128 register_name
= "SAAR";
7131 goto cp0_unimplemented
;
7134 case CP0_REGISTER_10
:
7136 case CP0_REG10__ENTRYHI
:
7137 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
7138 tcg_gen_ext32s_tl(arg
, arg
);
7139 register_name
= "EntryHi";
7142 goto cp0_unimplemented
;
7145 case CP0_REGISTER_11
:
7147 case CP0_REG11__COMPARE
:
7148 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
7149 register_name
= "Compare";
7151 /* 6,7 are implementation dependent */
7153 goto cp0_unimplemented
;
7156 case CP0_REGISTER_12
:
7158 case CP0_REG12__STATUS
:
7159 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
7160 register_name
= "Status";
7162 case CP0_REG12__INTCTL
:
7163 check_insn(ctx
, ISA_MIPS_R2
);
7164 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
7165 register_name
= "IntCtl";
7167 case CP0_REG12__SRSCTL
:
7168 check_insn(ctx
, ISA_MIPS_R2
);
7169 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
7170 register_name
= "SRSCtl";
7172 case CP0_REG12__SRSMAP
:
7173 check_insn(ctx
, ISA_MIPS_R2
);
7174 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7175 register_name
= "SRSMap";
7178 goto cp0_unimplemented
;
7181 case CP0_REGISTER_13
:
7183 case CP0_REG13__CAUSE
:
7184 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
7185 register_name
= "Cause";
7188 goto cp0_unimplemented
;
7191 case CP0_REGISTER_14
:
7193 case CP0_REG14__EPC
:
7194 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7195 tcg_gen_ext32s_tl(arg
, arg
);
7196 register_name
= "EPC";
7199 goto cp0_unimplemented
;
7202 case CP0_REGISTER_15
:
7204 case CP0_REG15__PRID
:
7205 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
7206 register_name
= "PRid";
7208 case CP0_REG15__EBASE
:
7209 check_insn(ctx
, ISA_MIPS_R2
);
7210 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
7211 tcg_gen_ext32s_tl(arg
, arg
);
7212 register_name
= "EBase";
7214 case CP0_REG15__CMGCRBASE
:
7215 check_insn(ctx
, ISA_MIPS_R2
);
7216 CP0_CHECK(ctx
->cmgcr
);
7217 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
7218 tcg_gen_ext32s_tl(arg
, arg
);
7219 register_name
= "CMGCRBase";
7222 goto cp0_unimplemented
;
7225 case CP0_REGISTER_16
:
7227 case CP0_REG16__CONFIG
:
7228 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
7229 register_name
= "Config";
7231 case CP0_REG16__CONFIG1
:
7232 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
7233 register_name
= "Config1";
7235 case CP0_REG16__CONFIG2
:
7236 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
7237 register_name
= "Config2";
7239 case CP0_REG16__CONFIG3
:
7240 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
7241 register_name
= "Config3";
7243 case CP0_REG16__CONFIG4
:
7244 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
7245 register_name
= "Config4";
7247 case CP0_REG16__CONFIG5
:
7248 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
7249 register_name
= "Config5";
7251 /* 6,7 are implementation dependent */
7252 case CP0_REG16__CONFIG6
:
7253 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
7254 register_name
= "Config6";
7256 case CP0_REG16__CONFIG7
:
7257 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
7258 register_name
= "Config7";
7261 goto cp0_unimplemented
;
7264 case CP0_REGISTER_17
:
7266 case CP0_REG17__LLADDR
:
7267 gen_helper_mfc0_lladdr(arg
, cpu_env
);
7268 register_name
= "LLAddr";
7270 case CP0_REG17__MAAR
:
7271 CP0_CHECK(ctx
->mrp
);
7272 gen_helper_mfc0_maar(arg
, cpu_env
);
7273 register_name
= "MAAR";
7275 case CP0_REG17__MAARI
:
7276 CP0_CHECK(ctx
->mrp
);
7277 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
7278 register_name
= "MAARI";
7281 goto cp0_unimplemented
;
7284 case CP0_REGISTER_18
:
7286 case CP0_REG18__WATCHLO0
:
7287 case CP0_REG18__WATCHLO1
:
7288 case CP0_REG18__WATCHLO2
:
7289 case CP0_REG18__WATCHLO3
:
7290 case CP0_REG18__WATCHLO4
:
7291 case CP0_REG18__WATCHLO5
:
7292 case CP0_REG18__WATCHLO6
:
7293 case CP0_REG18__WATCHLO7
:
7294 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7295 gen_helper_1e0i(mfc0_watchlo
, arg
, sel
);
7296 register_name
= "WatchLo";
7299 goto cp0_unimplemented
;
7302 case CP0_REGISTER_19
:
7304 case CP0_REG19__WATCHHI0
:
7305 case CP0_REG19__WATCHHI1
:
7306 case CP0_REG19__WATCHHI2
:
7307 case CP0_REG19__WATCHHI3
:
7308 case CP0_REG19__WATCHHI4
:
7309 case CP0_REG19__WATCHHI5
:
7310 case CP0_REG19__WATCHHI6
:
7311 case CP0_REG19__WATCHHI7
:
7312 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
7313 gen_helper_1e0i(mfc0_watchhi
, arg
, sel
);
7314 register_name
= "WatchHi";
7317 goto cp0_unimplemented
;
7320 case CP0_REGISTER_20
:
7322 case CP0_REG20__XCONTEXT
:
7323 #if defined(TARGET_MIPS64)
7324 check_insn(ctx
, ISA_MIPS3
);
7325 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
7326 tcg_gen_ext32s_tl(arg
, arg
);
7327 register_name
= "XContext";
7331 goto cp0_unimplemented
;
7334 case CP0_REGISTER_21
:
7335 /* Officially reserved, but sel 0 is used for R1x000 framemask */
7336 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
7339 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
7340 register_name
= "Framemask";
7343 goto cp0_unimplemented
;
7346 case CP0_REGISTER_22
:
7347 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7348 register_name
= "'Diagnostic"; /* implementation dependent */
7350 case CP0_REGISTER_23
:
7352 case CP0_REG23__DEBUG
:
7353 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
7354 register_name
= "Debug";
7356 case CP0_REG23__TRACECONTROL
:
7357 /* PDtrace support */
7358 /* gen_helper_mfc0_tracecontrol(arg); */
7359 register_name
= "TraceControl";
7360 goto cp0_unimplemented
;
7361 case CP0_REG23__TRACECONTROL2
:
7362 /* PDtrace support */
7363 /* gen_helper_mfc0_tracecontrol2(arg); */
7364 register_name
= "TraceControl2";
7365 goto cp0_unimplemented
;
7366 case CP0_REG23__USERTRACEDATA1
:
7367 /* PDtrace support */
7368 /* gen_helper_mfc0_usertracedata1(arg);*/
7369 register_name
= "UserTraceData1";
7370 goto cp0_unimplemented
;
7371 case CP0_REG23__TRACEIBPC
:
7372 /* PDtrace support */
7373 /* gen_helper_mfc0_traceibpc(arg); */
7374 register_name
= "TraceIBPC";
7375 goto cp0_unimplemented
;
7376 case CP0_REG23__TRACEDBPC
:
7377 /* PDtrace support */
7378 /* gen_helper_mfc0_tracedbpc(arg); */
7379 register_name
= "TraceDBPC";
7380 goto cp0_unimplemented
;
7382 goto cp0_unimplemented
;
7385 case CP0_REGISTER_24
:
7387 case CP0_REG24__DEPC
:
7389 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
7390 tcg_gen_ext32s_tl(arg
, arg
);
7391 register_name
= "DEPC";
7394 goto cp0_unimplemented
;
7397 case CP0_REGISTER_25
:
7399 case CP0_REG25__PERFCTL0
:
7400 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
7401 register_name
= "Performance0";
7403 case CP0_REG25__PERFCNT0
:
7404 /* gen_helper_mfc0_performance1(arg); */
7405 register_name
= "Performance1";
7406 goto cp0_unimplemented
;
7407 case CP0_REG25__PERFCTL1
:
7408 /* gen_helper_mfc0_performance2(arg); */
7409 register_name
= "Performance2";
7410 goto cp0_unimplemented
;
7411 case CP0_REG25__PERFCNT1
:
7412 /* gen_helper_mfc0_performance3(arg); */
7413 register_name
= "Performance3";
7414 goto cp0_unimplemented
;
7415 case CP0_REG25__PERFCTL2
:
7416 /* gen_helper_mfc0_performance4(arg); */
7417 register_name
= "Performance4";
7418 goto cp0_unimplemented
;
7419 case CP0_REG25__PERFCNT2
:
7420 /* gen_helper_mfc0_performance5(arg); */
7421 register_name
= "Performance5";
7422 goto cp0_unimplemented
;
7423 case CP0_REG25__PERFCTL3
:
7424 /* gen_helper_mfc0_performance6(arg); */
7425 register_name
= "Performance6";
7426 goto cp0_unimplemented
;
7427 case CP0_REG25__PERFCNT3
:
7428 /* gen_helper_mfc0_performance7(arg); */
7429 register_name
= "Performance7";
7430 goto cp0_unimplemented
;
7432 goto cp0_unimplemented
;
7435 case CP0_REGISTER_26
:
7437 case CP0_REG26__ERRCTL
:
7438 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
7439 register_name
= "ErrCtl";
7442 goto cp0_unimplemented
;
7445 case CP0_REGISTER_27
:
7447 case CP0_REG27__CACHERR
:
7448 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
7449 register_name
= "CacheErr";
7452 goto cp0_unimplemented
;
7455 case CP0_REGISTER_28
:
7457 case CP0_REG28__TAGLO
:
7458 case CP0_REG28__TAGLO1
:
7459 case CP0_REG28__TAGLO2
:
7460 case CP0_REG28__TAGLO3
:
7462 TCGv_i64 tmp
= tcg_temp_new_i64();
7463 tcg_gen_ld_i64(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_TagLo
));
7464 gen_move_low32(arg
, tmp
);
7465 tcg_temp_free_i64(tmp
);
7467 register_name
= "TagLo";
7469 case CP0_REG28__DATALO
:
7470 case CP0_REG28__DATALO1
:
7471 case CP0_REG28__DATALO2
:
7472 case CP0_REG28__DATALO3
:
7473 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
7474 register_name
= "DataLo";
7477 goto cp0_unimplemented
;
7480 case CP0_REGISTER_29
:
7482 case CP0_REG29__TAGHI
:
7483 case CP0_REG29__TAGHI1
:
7484 case CP0_REG29__TAGHI2
:
7485 case CP0_REG29__TAGHI3
:
7486 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
7487 register_name
= "TagHi";
7489 case CP0_REG29__DATAHI
:
7490 case CP0_REG29__DATAHI1
:
7491 case CP0_REG29__DATAHI2
:
7492 case CP0_REG29__DATAHI3
:
7493 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
7494 register_name
= "DataHi";
7497 goto cp0_unimplemented
;
7500 case CP0_REGISTER_30
:
7502 case CP0_REG30__ERROREPC
:
7503 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
7504 tcg_gen_ext32s_tl(arg
, arg
);
7505 register_name
= "ErrorEPC";
7508 goto cp0_unimplemented
;
7511 case CP0_REGISTER_31
:
7513 case CP0_REG31__DESAVE
:
7515 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
7516 register_name
= "DESAVE";
7518 case CP0_REG31__KSCRATCH1
:
7519 case CP0_REG31__KSCRATCH2
:
7520 case CP0_REG31__KSCRATCH3
:
7521 case CP0_REG31__KSCRATCH4
:
7522 case CP0_REG31__KSCRATCH5
:
7523 case CP0_REG31__KSCRATCH6
:
7524 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
7525 tcg_gen_ld_tl(arg
, cpu_env
,
7526 offsetof(CPUMIPSState
, CP0_KScratch
[sel
- 2]));
7527 tcg_gen_ext32s_tl(arg
, arg
);
7528 register_name
= "KScratch";
7531 goto cp0_unimplemented
;
7535 goto cp0_unimplemented
;
7537 trace_mips_translate_c0("mfc0", register_name
, reg
, sel
);
7541 qemu_log_mask(LOG_UNIMP
, "mfc0 %s (reg %d sel %d)\n",
7542 register_name
, reg
, sel
);
7543 gen_mfc0_unimplemented(ctx
, arg
);
7546 static void gen_mtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
7548 const char *register_name
= "invalid";
7551 check_insn(ctx
, ISA_MIPS_R1
);
7554 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
7559 case CP0_REGISTER_00
:
7561 case CP0_REG00__INDEX
:
7562 gen_helper_mtc0_index(cpu_env
, arg
);
7563 register_name
= "Index";
7565 case CP0_REG00__MVPCONTROL
:
7566 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7567 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
7568 register_name
= "MVPControl";
7570 case CP0_REG00__MVPCONF0
:
7571 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7573 register_name
= "MVPConf0";
7575 case CP0_REG00__MVPCONF1
:
7576 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7578 register_name
= "MVPConf1";
7580 case CP0_REG00__VPCONTROL
:
7583 register_name
= "VPControl";
7586 goto cp0_unimplemented
;
7589 case CP0_REGISTER_01
:
7591 case CP0_REG01__RANDOM
:
7593 register_name
= "Random";
7595 case CP0_REG01__VPECONTROL
:
7596 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7597 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
7598 register_name
= "VPEControl";
7600 case CP0_REG01__VPECONF0
:
7601 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7602 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
7603 register_name
= "VPEConf0";
7605 case CP0_REG01__VPECONF1
:
7606 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7607 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
7608 register_name
= "VPEConf1";
7610 case CP0_REG01__YQMASK
:
7611 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7612 gen_helper_mtc0_yqmask(cpu_env
, arg
);
7613 register_name
= "YQMask";
7615 case CP0_REG01__VPESCHEDULE
:
7616 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7617 tcg_gen_st_tl(arg
, cpu_env
,
7618 offsetof(CPUMIPSState
, CP0_VPESchedule
));
7619 register_name
= "VPESchedule";
7621 case CP0_REG01__VPESCHEFBACK
:
7622 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7623 tcg_gen_st_tl(arg
, cpu_env
,
7624 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
7625 register_name
= "VPEScheFBack";
7627 case CP0_REG01__VPEOPT
:
7628 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7629 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
7630 register_name
= "VPEOpt";
7633 goto cp0_unimplemented
;
7636 case CP0_REGISTER_02
:
7638 case CP0_REG02__ENTRYLO0
:
7639 gen_helper_mtc0_entrylo0(cpu_env
, arg
);
7640 register_name
= "EntryLo0";
7642 case CP0_REG02__TCSTATUS
:
7643 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7644 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
7645 register_name
= "TCStatus";
7647 case CP0_REG02__TCBIND
:
7648 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7649 gen_helper_mtc0_tcbind(cpu_env
, arg
);
7650 register_name
= "TCBind";
7652 case CP0_REG02__TCRESTART
:
7653 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7654 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
7655 register_name
= "TCRestart";
7657 case CP0_REG02__TCHALT
:
7658 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7659 gen_helper_mtc0_tchalt(cpu_env
, arg
);
7660 register_name
= "TCHalt";
7662 case CP0_REG02__TCCONTEXT
:
7663 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7664 gen_helper_mtc0_tccontext(cpu_env
, arg
);
7665 register_name
= "TCContext";
7667 case CP0_REG02__TCSCHEDULE
:
7668 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7669 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
7670 register_name
= "TCSchedule";
7672 case CP0_REG02__TCSCHEFBACK
:
7673 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
7674 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
7675 register_name
= "TCScheFBack";
7678 goto cp0_unimplemented
;
7681 case CP0_REGISTER_03
:
7683 case CP0_REG03__ENTRYLO1
:
7684 gen_helper_mtc0_entrylo1(cpu_env
, arg
);
7685 register_name
= "EntryLo1";
7687 case CP0_REG03__GLOBALNUM
:
7690 register_name
= "GlobalNumber";
7693 goto cp0_unimplemented
;
7696 case CP0_REGISTER_04
:
7698 case CP0_REG04__CONTEXT
:
7699 gen_helper_mtc0_context(cpu_env
, arg
);
7700 register_name
= "Context";
7702 case CP0_REG04__CONTEXTCONFIG
:
7704 /* gen_helper_mtc0_contextconfig(arg); */
7705 register_name
= "ContextConfig";
7706 goto cp0_unimplemented
;
7707 case CP0_REG04__USERLOCAL
:
7708 CP0_CHECK(ctx
->ulri
);
7709 tcg_gen_st_tl(arg
, cpu_env
,
7710 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
7711 register_name
= "UserLocal";
7713 case CP0_REG04__MMID
:
7715 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MemoryMapID
));
7716 register_name
= "MMID";
7719 goto cp0_unimplemented
;
7722 case CP0_REGISTER_05
:
7724 case CP0_REG05__PAGEMASK
:
7725 gen_helper_mtc0_pagemask(cpu_env
, arg
);
7726 register_name
= "PageMask";
7728 case CP0_REG05__PAGEGRAIN
:
7729 check_insn(ctx
, ISA_MIPS_R2
);
7730 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
7731 register_name
= "PageGrain";
7732 ctx
->base
.is_jmp
= DISAS_STOP
;
7734 case CP0_REG05__SEGCTL0
:
7736 gen_helper_mtc0_segctl0(cpu_env
, arg
);
7737 register_name
= "SegCtl0";
7739 case CP0_REG05__SEGCTL1
:
7741 gen_helper_mtc0_segctl1(cpu_env
, arg
);
7742 register_name
= "SegCtl1";
7744 case CP0_REG05__SEGCTL2
:
7746 gen_helper_mtc0_segctl2(cpu_env
, arg
);
7747 register_name
= "SegCtl2";
7749 case CP0_REG05__PWBASE
:
7751 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_PWBase
));
7752 register_name
= "PWBase";
7754 case CP0_REG05__PWFIELD
:
7756 gen_helper_mtc0_pwfield(cpu_env
, arg
);
7757 register_name
= "PWField";
7759 case CP0_REG05__PWSIZE
:
7761 gen_helper_mtc0_pwsize(cpu_env
, arg
);
7762 register_name
= "PWSize";
7765 goto cp0_unimplemented
;
7768 case CP0_REGISTER_06
:
7770 case CP0_REG06__WIRED
:
7771 gen_helper_mtc0_wired(cpu_env
, arg
);
7772 register_name
= "Wired";
7774 case CP0_REG06__SRSCONF0
:
7775 check_insn(ctx
, ISA_MIPS_R2
);
7776 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
7777 register_name
= "SRSConf0";
7779 case CP0_REG06__SRSCONF1
:
7780 check_insn(ctx
, ISA_MIPS_R2
);
7781 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
7782 register_name
= "SRSConf1";
7784 case CP0_REG06__SRSCONF2
:
7785 check_insn(ctx
, ISA_MIPS_R2
);
7786 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
7787 register_name
= "SRSConf2";
7789 case CP0_REG06__SRSCONF3
:
7790 check_insn(ctx
, ISA_MIPS_R2
);
7791 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
7792 register_name
= "SRSConf3";
7794 case CP0_REG06__SRSCONF4
:
7795 check_insn(ctx
, ISA_MIPS_R2
);
7796 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
7797 register_name
= "SRSConf4";
7799 case CP0_REG06__PWCTL
:
7801 gen_helper_mtc0_pwctl(cpu_env
, arg
);
7802 register_name
= "PWCtl";
7805 goto cp0_unimplemented
;
7808 case CP0_REGISTER_07
:
7810 case CP0_REG07__HWRENA
:
7811 check_insn(ctx
, ISA_MIPS_R2
);
7812 gen_helper_mtc0_hwrena(cpu_env
, arg
);
7813 ctx
->base
.is_jmp
= DISAS_STOP
;
7814 register_name
= "HWREna";
7817 goto cp0_unimplemented
;
7820 case CP0_REGISTER_08
:
7822 case CP0_REG08__BADVADDR
:
7824 register_name
= "BadVAddr";
7826 case CP0_REG08__BADINSTR
:
7828 register_name
= "BadInstr";
7830 case CP0_REG08__BADINSTRP
:
7832 register_name
= "BadInstrP";
7834 case CP0_REG08__BADINSTRX
:
7836 register_name
= "BadInstrX";
7839 goto cp0_unimplemented
;
7842 case CP0_REGISTER_09
:
7844 case CP0_REG09__COUNT
:
7845 gen_helper_mtc0_count(cpu_env
, arg
);
7846 register_name
= "Count";
7848 case CP0_REG09__SAARI
:
7849 CP0_CHECK(ctx
->saar
);
7850 gen_helper_mtc0_saari(cpu_env
, arg
);
7851 register_name
= "SAARI";
7853 case CP0_REG09__SAAR
:
7854 CP0_CHECK(ctx
->saar
);
7855 gen_helper_mtc0_saar(cpu_env
, arg
);
7856 register_name
= "SAAR";
7859 goto cp0_unimplemented
;
7862 case CP0_REGISTER_10
:
7864 case CP0_REG10__ENTRYHI
:
7865 gen_helper_mtc0_entryhi(cpu_env
, arg
);
7866 register_name
= "EntryHi";
7869 goto cp0_unimplemented
;
7872 case CP0_REGISTER_11
:
7874 case CP0_REG11__COMPARE
:
7875 gen_helper_mtc0_compare(cpu_env
, arg
);
7876 register_name
= "Compare";
7878 /* 6,7 are implementation dependent */
7880 goto cp0_unimplemented
;
7883 case CP0_REGISTER_12
:
7885 case CP0_REG12__STATUS
:
7886 save_cpu_state(ctx
, 1);
7887 gen_helper_mtc0_status(cpu_env
, arg
);
7888 /* DISAS_STOP isn't good enough here, hflags may have changed. */
7889 gen_save_pc(ctx
->base
.pc_next
+ 4);
7890 ctx
->base
.is_jmp
= DISAS_EXIT
;
7891 register_name
= "Status";
7893 case CP0_REG12__INTCTL
:
7894 check_insn(ctx
, ISA_MIPS_R2
);
7895 gen_helper_mtc0_intctl(cpu_env
, arg
);
7896 /* Stop translation as we may have switched the execution mode */
7897 ctx
->base
.is_jmp
= DISAS_STOP
;
7898 register_name
= "IntCtl";
7900 case CP0_REG12__SRSCTL
:
7901 check_insn(ctx
, ISA_MIPS_R2
);
7902 gen_helper_mtc0_srsctl(cpu_env
, arg
);
7903 /* Stop translation as we may have switched the execution mode */
7904 ctx
->base
.is_jmp
= DISAS_STOP
;
7905 register_name
= "SRSCtl";
7907 case CP0_REG12__SRSMAP
:
7908 check_insn(ctx
, ISA_MIPS_R2
);
7909 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
7910 /* Stop translation as we may have switched the execution mode */
7911 ctx
->base
.is_jmp
= DISAS_STOP
;
7912 register_name
= "SRSMap";
7915 goto cp0_unimplemented
;
7918 case CP0_REGISTER_13
:
7920 case CP0_REG13__CAUSE
:
7921 save_cpu_state(ctx
, 1);
7922 gen_helper_mtc0_cause(cpu_env
, arg
);
7924 * Stop translation as we may have triggered an interrupt.
7925 * DISAS_STOP isn't sufficient, we need to ensure we break out of
7926 * translated code to check for pending interrupts.
7928 gen_save_pc(ctx
->base
.pc_next
+ 4);
7929 ctx
->base
.is_jmp
= DISAS_EXIT
;
7930 register_name
= "Cause";
7933 goto cp0_unimplemented
;
7936 case CP0_REGISTER_14
:
7938 case CP0_REG14__EPC
:
7939 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
7940 register_name
= "EPC";
7943 goto cp0_unimplemented
;
7946 case CP0_REGISTER_15
:
7948 case CP0_REG15__PRID
:
7950 register_name
= "PRid";
7952 case CP0_REG15__EBASE
:
7953 check_insn(ctx
, ISA_MIPS_R2
);
7954 gen_helper_mtc0_ebase(cpu_env
, arg
);
7955 register_name
= "EBase";
7958 goto cp0_unimplemented
;
7961 case CP0_REGISTER_16
:
7963 case CP0_REG16__CONFIG
:
7964 gen_helper_mtc0_config0(cpu_env
, arg
);
7965 register_name
= "Config";
7966 /* Stop translation as we may have switched the execution mode */
7967 ctx
->base
.is_jmp
= DISAS_STOP
;
7969 case CP0_REG16__CONFIG1
:
7970 /* ignored, read only */
7971 register_name
= "Config1";
7973 case CP0_REG16__CONFIG2
:
7974 gen_helper_mtc0_config2(cpu_env
, arg
);
7975 register_name
= "Config2";
7976 /* Stop translation as we may have switched the execution mode */
7977 ctx
->base
.is_jmp
= DISAS_STOP
;
7979 case CP0_REG16__CONFIG3
:
7980 gen_helper_mtc0_config3(cpu_env
, arg
);
7981 register_name
= "Config3";
7982 /* Stop translation as we may have switched the execution mode */
7983 ctx
->base
.is_jmp
= DISAS_STOP
;
7985 case CP0_REG16__CONFIG4
:
7986 gen_helper_mtc0_config4(cpu_env
, arg
);
7987 register_name
= "Config4";
7988 ctx
->base
.is_jmp
= DISAS_STOP
;
7990 case CP0_REG16__CONFIG5
:
7991 gen_helper_mtc0_config5(cpu_env
, arg
);
7992 register_name
= "Config5";
7993 /* Stop translation as we may have switched the execution mode */
7994 ctx
->base
.is_jmp
= DISAS_STOP
;
7996 /* 6,7 are implementation dependent */
7997 case CP0_REG16__CONFIG6
:
7999 register_name
= "Config6";
8001 case CP0_REG16__CONFIG7
:
8003 register_name
= "Config7";
8006 register_name
= "Invalid config selector";
8007 goto cp0_unimplemented
;
8010 case CP0_REGISTER_17
:
8012 case CP0_REG17__LLADDR
:
8013 gen_helper_mtc0_lladdr(cpu_env
, arg
);
8014 register_name
= "LLAddr";
8016 case CP0_REG17__MAAR
:
8017 CP0_CHECK(ctx
->mrp
);
8018 gen_helper_mtc0_maar(cpu_env
, arg
);
8019 register_name
= "MAAR";
8021 case CP0_REG17__MAARI
:
8022 CP0_CHECK(ctx
->mrp
);
8023 gen_helper_mtc0_maari(cpu_env
, arg
);
8024 register_name
= "MAARI";
8027 goto cp0_unimplemented
;
8030 case CP0_REGISTER_18
:
8032 case CP0_REG18__WATCHLO0
:
8033 case CP0_REG18__WATCHLO1
:
8034 case CP0_REG18__WATCHLO2
:
8035 case CP0_REG18__WATCHLO3
:
8036 case CP0_REG18__WATCHLO4
:
8037 case CP0_REG18__WATCHLO5
:
8038 case CP0_REG18__WATCHLO6
:
8039 case CP0_REG18__WATCHLO7
:
8040 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8041 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
8042 register_name
= "WatchLo";
8045 goto cp0_unimplemented
;
8048 case CP0_REGISTER_19
:
8050 case CP0_REG19__WATCHHI0
:
8051 case CP0_REG19__WATCHHI1
:
8052 case CP0_REG19__WATCHHI2
:
8053 case CP0_REG19__WATCHHI3
:
8054 case CP0_REG19__WATCHHI4
:
8055 case CP0_REG19__WATCHHI5
:
8056 case CP0_REG19__WATCHHI6
:
8057 case CP0_REG19__WATCHHI7
:
8058 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8059 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
8060 register_name
= "WatchHi";
8063 goto cp0_unimplemented
;
8066 case CP0_REGISTER_20
:
8068 case CP0_REG20__XCONTEXT
:
8069 #if defined(TARGET_MIPS64)
8070 check_insn(ctx
, ISA_MIPS3
);
8071 gen_helper_mtc0_xcontext(cpu_env
, arg
);
8072 register_name
= "XContext";
8076 goto cp0_unimplemented
;
8079 case CP0_REGISTER_21
:
8080 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8081 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
8084 gen_helper_mtc0_framemask(cpu_env
, arg
);
8085 register_name
= "Framemask";
8088 goto cp0_unimplemented
;
8091 case CP0_REGISTER_22
:
8093 register_name
= "Diagnostic"; /* implementation dependent */
8095 case CP0_REGISTER_23
:
8097 case CP0_REG23__DEBUG
:
8098 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
8099 /* DISAS_STOP isn't good enough here, hflags may have changed. */
8100 gen_save_pc(ctx
->base
.pc_next
+ 4);
8101 ctx
->base
.is_jmp
= DISAS_EXIT
;
8102 register_name
= "Debug";
8104 case CP0_REG23__TRACECONTROL
:
8105 /* PDtrace support */
8106 /* gen_helper_mtc0_tracecontrol(cpu_env, arg); */
8107 register_name
= "TraceControl";
8108 /* Stop translation as we may have switched the execution mode */
8109 ctx
->base
.is_jmp
= DISAS_STOP
;
8110 goto cp0_unimplemented
;
8111 case CP0_REG23__TRACECONTROL2
:
8112 /* PDtrace support */
8113 /* gen_helper_mtc0_tracecontrol2(cpu_env, arg); */
8114 register_name
= "TraceControl2";
8115 /* Stop translation as we may have switched the execution mode */
8116 ctx
->base
.is_jmp
= DISAS_STOP
;
8117 goto cp0_unimplemented
;
8118 case CP0_REG23__USERTRACEDATA1
:
8119 /* Stop translation as we may have switched the execution mode */
8120 ctx
->base
.is_jmp
= DISAS_STOP
;
8121 /* PDtrace support */
8122 /* gen_helper_mtc0_usertracedata1(cpu_env, arg);*/
8123 register_name
= "UserTraceData";
8124 /* Stop translation as we may have switched the execution mode */
8125 ctx
->base
.is_jmp
= DISAS_STOP
;
8126 goto cp0_unimplemented
;
8127 case CP0_REG23__TRACEIBPC
:
8128 /* PDtrace support */
8129 /* gen_helper_mtc0_traceibpc(cpu_env, arg); */
8130 /* Stop translation as we may have switched the execution mode */
8131 ctx
->base
.is_jmp
= DISAS_STOP
;
8132 register_name
= "TraceIBPC";
8133 goto cp0_unimplemented
;
8134 case CP0_REG23__TRACEDBPC
:
8135 /* PDtrace support */
8136 /* gen_helper_mtc0_tracedbpc(cpu_env, arg); */
8137 /* Stop translation as we may have switched the execution mode */
8138 ctx
->base
.is_jmp
= DISAS_STOP
;
8139 register_name
= "TraceDBPC";
8140 goto cp0_unimplemented
;
8142 goto cp0_unimplemented
;
8145 case CP0_REGISTER_24
:
8147 case CP0_REG24__DEPC
:
8149 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8150 register_name
= "DEPC";
8153 goto cp0_unimplemented
;
8156 case CP0_REGISTER_25
:
8158 case CP0_REG25__PERFCTL0
:
8159 gen_helper_mtc0_performance0(cpu_env
, arg
);
8160 register_name
= "Performance0";
8162 case CP0_REG25__PERFCNT0
:
8163 /* gen_helper_mtc0_performance1(arg); */
8164 register_name
= "Performance1";
8165 goto cp0_unimplemented
;
8166 case CP0_REG25__PERFCTL1
:
8167 /* gen_helper_mtc0_performance2(arg); */
8168 register_name
= "Performance2";
8169 goto cp0_unimplemented
;
8170 case CP0_REG25__PERFCNT1
:
8171 /* gen_helper_mtc0_performance3(arg); */
8172 register_name
= "Performance3";
8173 goto cp0_unimplemented
;
8174 case CP0_REG25__PERFCTL2
:
8175 /* gen_helper_mtc0_performance4(arg); */
8176 register_name
= "Performance4";
8177 goto cp0_unimplemented
;
8178 case CP0_REG25__PERFCNT2
:
8179 /* gen_helper_mtc0_performance5(arg); */
8180 register_name
= "Performance5";
8181 goto cp0_unimplemented
;
8182 case CP0_REG25__PERFCTL3
:
8183 /* gen_helper_mtc0_performance6(arg); */
8184 register_name
= "Performance6";
8185 goto cp0_unimplemented
;
8186 case CP0_REG25__PERFCNT3
:
8187 /* gen_helper_mtc0_performance7(arg); */
8188 register_name
= "Performance7";
8189 goto cp0_unimplemented
;
8191 goto cp0_unimplemented
;
8194 case CP0_REGISTER_26
:
8196 case CP0_REG26__ERRCTL
:
8197 gen_helper_mtc0_errctl(cpu_env
, arg
);
8198 ctx
->base
.is_jmp
= DISAS_STOP
;
8199 register_name
= "ErrCtl";
8202 goto cp0_unimplemented
;
8205 case CP0_REGISTER_27
:
8207 case CP0_REG27__CACHERR
:
8209 register_name
= "CacheErr";
8212 goto cp0_unimplemented
;
8215 case CP0_REGISTER_28
:
8217 case CP0_REG28__TAGLO
:
8218 case CP0_REG28__TAGLO1
:
8219 case CP0_REG28__TAGLO2
:
8220 case CP0_REG28__TAGLO3
:
8221 gen_helper_mtc0_taglo(cpu_env
, arg
);
8222 register_name
= "TagLo";
8224 case CP0_REG28__DATALO
:
8225 case CP0_REG28__DATALO1
:
8226 case CP0_REG28__DATALO2
:
8227 case CP0_REG28__DATALO3
:
8228 gen_helper_mtc0_datalo(cpu_env
, arg
);
8229 register_name
= "DataLo";
8232 goto cp0_unimplemented
;
8235 case CP0_REGISTER_29
:
8237 case CP0_REG29__TAGHI
:
8238 case CP0_REG29__TAGHI1
:
8239 case CP0_REG29__TAGHI2
:
8240 case CP0_REG29__TAGHI3
:
8241 gen_helper_mtc0_taghi(cpu_env
, arg
);
8242 register_name
= "TagHi";
8244 case CP0_REG29__DATAHI
:
8245 case CP0_REG29__DATAHI1
:
8246 case CP0_REG29__DATAHI2
:
8247 case CP0_REG29__DATAHI3
:
8248 gen_helper_mtc0_datahi(cpu_env
, arg
);
8249 register_name
= "DataHi";
8252 register_name
= "invalid sel";
8253 goto cp0_unimplemented
;
8256 case CP0_REGISTER_30
:
8258 case CP0_REG30__ERROREPC
:
8259 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8260 register_name
= "ErrorEPC";
8263 goto cp0_unimplemented
;
8266 case CP0_REGISTER_31
:
8268 case CP0_REG31__DESAVE
:
8270 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
8271 register_name
= "DESAVE";
8273 case CP0_REG31__KSCRATCH1
:
8274 case CP0_REG31__KSCRATCH2
:
8275 case CP0_REG31__KSCRATCH3
:
8276 case CP0_REG31__KSCRATCH4
:
8277 case CP0_REG31__KSCRATCH5
:
8278 case CP0_REG31__KSCRATCH6
:
8279 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
8280 tcg_gen_st_tl(arg
, cpu_env
,
8281 offsetof(CPUMIPSState
, CP0_KScratch
[sel
- 2]));
8282 register_name
= "KScratch";
8285 goto cp0_unimplemented
;
8289 goto cp0_unimplemented
;
8291 trace_mips_translate_c0("mtc0", register_name
, reg
, sel
);
8293 /* For simplicity assume that all writes can cause interrupts. */
8294 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8296 * DISAS_STOP isn't sufficient, we need to ensure we break out of
8297 * translated code to check for pending interrupts.
8299 gen_save_pc(ctx
->base
.pc_next
+ 4);
8300 ctx
->base
.is_jmp
= DISAS_EXIT
;
8305 qemu_log_mask(LOG_UNIMP
, "mtc0 %s (reg %d sel %d)\n",
8306 register_name
, reg
, sel
);
8309 #if defined(TARGET_MIPS64)
8310 static void gen_dmfc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
8312 const char *register_name
= "invalid";
8315 check_insn(ctx
, ISA_MIPS_R1
);
8319 case CP0_REGISTER_00
:
8321 case CP0_REG00__INDEX
:
8322 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Index
));
8323 register_name
= "Index";
8325 case CP0_REG00__MVPCONTROL
:
8326 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8327 gen_helper_mfc0_mvpcontrol(arg
, cpu_env
);
8328 register_name
= "MVPControl";
8330 case CP0_REG00__MVPCONF0
:
8331 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8332 gen_helper_mfc0_mvpconf0(arg
, cpu_env
);
8333 register_name
= "MVPConf0";
8335 case CP0_REG00__MVPCONF1
:
8336 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8337 gen_helper_mfc0_mvpconf1(arg
, cpu_env
);
8338 register_name
= "MVPConf1";
8340 case CP0_REG00__VPCONTROL
:
8342 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPControl
));
8343 register_name
= "VPControl";
8346 goto cp0_unimplemented
;
8349 case CP0_REGISTER_01
:
8351 case CP0_REG01__RANDOM
:
8352 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
8353 gen_helper_mfc0_random(arg
, cpu_env
);
8354 register_name
= "Random";
8356 case CP0_REG01__VPECONTROL
:
8357 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8358 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEControl
));
8359 register_name
= "VPEControl";
8361 case CP0_REG01__VPECONF0
:
8362 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8363 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf0
));
8364 register_name
= "VPEConf0";
8366 case CP0_REG01__VPECONF1
:
8367 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8368 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEConf1
));
8369 register_name
= "VPEConf1";
8371 case CP0_REG01__YQMASK
:
8372 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8373 tcg_gen_ld_tl(arg
, cpu_env
,
8374 offsetof(CPUMIPSState
, CP0_YQMask
));
8375 register_name
= "YQMask";
8377 case CP0_REG01__VPESCHEDULE
:
8378 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8379 tcg_gen_ld_tl(arg
, cpu_env
,
8380 offsetof(CPUMIPSState
, CP0_VPESchedule
));
8381 register_name
= "VPESchedule";
8383 case CP0_REG01__VPESCHEFBACK
:
8384 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8385 tcg_gen_ld_tl(arg
, cpu_env
,
8386 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
8387 register_name
= "VPEScheFBack";
8389 case CP0_REG01__VPEOPT
:
8390 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8391 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_VPEOpt
));
8392 register_name
= "VPEOpt";
8395 goto cp0_unimplemented
;
8398 case CP0_REGISTER_02
:
8400 case CP0_REG02__ENTRYLO0
:
8401 tcg_gen_ld_tl(arg
, cpu_env
,
8402 offsetof(CPUMIPSState
, CP0_EntryLo0
));
8403 register_name
= "EntryLo0";
8405 case CP0_REG02__TCSTATUS
:
8406 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8407 gen_helper_mfc0_tcstatus(arg
, cpu_env
);
8408 register_name
= "TCStatus";
8410 case CP0_REG02__TCBIND
:
8411 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8412 gen_helper_mfc0_tcbind(arg
, cpu_env
);
8413 register_name
= "TCBind";
8415 case CP0_REG02__TCRESTART
:
8416 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8417 gen_helper_dmfc0_tcrestart(arg
, cpu_env
);
8418 register_name
= "TCRestart";
8420 case CP0_REG02__TCHALT
:
8421 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8422 gen_helper_dmfc0_tchalt(arg
, cpu_env
);
8423 register_name
= "TCHalt";
8425 case CP0_REG02__TCCONTEXT
:
8426 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8427 gen_helper_dmfc0_tccontext(arg
, cpu_env
);
8428 register_name
= "TCContext";
8430 case CP0_REG02__TCSCHEDULE
:
8431 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8432 gen_helper_dmfc0_tcschedule(arg
, cpu_env
);
8433 register_name
= "TCSchedule";
8435 case CP0_REG02__TCSCHEFBACK
:
8436 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
8437 gen_helper_dmfc0_tcschefback(arg
, cpu_env
);
8438 register_name
= "TCScheFBack";
8441 goto cp0_unimplemented
;
8444 case CP0_REGISTER_03
:
8446 case CP0_REG03__ENTRYLO1
:
8447 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryLo1
));
8448 register_name
= "EntryLo1";
8450 case CP0_REG03__GLOBALNUM
:
8452 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_GlobalNumber
));
8453 register_name
= "GlobalNumber";
8456 goto cp0_unimplemented
;
8459 case CP0_REGISTER_04
:
8461 case CP0_REG04__CONTEXT
:
8462 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_Context
));
8463 register_name
= "Context";
8465 case CP0_REG04__CONTEXTCONFIG
:
8467 /* gen_helper_dmfc0_contextconfig(arg); */
8468 register_name
= "ContextConfig";
8469 goto cp0_unimplemented
;
8470 case CP0_REG04__USERLOCAL
:
8471 CP0_CHECK(ctx
->ulri
);
8472 tcg_gen_ld_tl(arg
, cpu_env
,
8473 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
8474 register_name
= "UserLocal";
8476 case CP0_REG04__MMID
:
8478 gen_helper_mtc0_memorymapid(cpu_env
, arg
);
8479 register_name
= "MMID";
8482 goto cp0_unimplemented
;
8485 case CP0_REGISTER_05
:
8487 case CP0_REG05__PAGEMASK
:
8488 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageMask
));
8489 register_name
= "PageMask";
8491 case CP0_REG05__PAGEGRAIN
:
8492 check_insn(ctx
, ISA_MIPS_R2
);
8493 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PageGrain
));
8494 register_name
= "PageGrain";
8496 case CP0_REG05__SEGCTL0
:
8498 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl0
));
8499 register_name
= "SegCtl0";
8501 case CP0_REG05__SEGCTL1
:
8503 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl1
));
8504 register_name
= "SegCtl1";
8506 case CP0_REG05__SEGCTL2
:
8508 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_SegCtl2
));
8509 register_name
= "SegCtl2";
8511 case CP0_REG05__PWBASE
:
8513 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
8514 register_name
= "PWBase";
8516 case CP0_REG05__PWFIELD
:
8518 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWField
));
8519 register_name
= "PWField";
8521 case CP0_REG05__PWSIZE
:
8523 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWSize
));
8524 register_name
= "PWSize";
8527 goto cp0_unimplemented
;
8530 case CP0_REGISTER_06
:
8532 case CP0_REG06__WIRED
:
8533 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Wired
));
8534 register_name
= "Wired";
8536 case CP0_REG06__SRSCONF0
:
8537 check_insn(ctx
, ISA_MIPS_R2
);
8538 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf0
));
8539 register_name
= "SRSConf0";
8541 case CP0_REG06__SRSCONF1
:
8542 check_insn(ctx
, ISA_MIPS_R2
);
8543 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf1
));
8544 register_name
= "SRSConf1";
8546 case CP0_REG06__SRSCONF2
:
8547 check_insn(ctx
, ISA_MIPS_R2
);
8548 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf2
));
8549 register_name
= "SRSConf2";
8551 case CP0_REG06__SRSCONF3
:
8552 check_insn(ctx
, ISA_MIPS_R2
);
8553 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf3
));
8554 register_name
= "SRSConf3";
8556 case CP0_REG06__SRSCONF4
:
8557 check_insn(ctx
, ISA_MIPS_R2
);
8558 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSConf4
));
8559 register_name
= "SRSConf4";
8561 case CP0_REG06__PWCTL
:
8563 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PWCtl
));
8564 register_name
= "PWCtl";
8567 goto cp0_unimplemented
;
8570 case CP0_REGISTER_07
:
8572 case CP0_REG07__HWRENA
:
8573 check_insn(ctx
, ISA_MIPS_R2
);
8574 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_HWREna
));
8575 register_name
= "HWREna";
8578 goto cp0_unimplemented
;
8581 case CP0_REGISTER_08
:
8583 case CP0_REG08__BADVADDR
:
8584 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
8585 register_name
= "BadVAddr";
8587 case CP0_REG08__BADINSTR
:
8589 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstr
));
8590 register_name
= "BadInstr";
8592 case CP0_REG08__BADINSTRP
:
8594 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrP
));
8595 register_name
= "BadInstrP";
8597 case CP0_REG08__BADINSTRX
:
8599 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_BadInstrX
));
8600 tcg_gen_andi_tl(arg
, arg
, ~0xffff);
8601 register_name
= "BadInstrX";
8604 goto cp0_unimplemented
;
8607 case CP0_REGISTER_09
:
8609 case CP0_REG09__COUNT
:
8610 /* Mark as an IO operation because we read the time. */
8611 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
8614 gen_helper_mfc0_count(arg
, cpu_env
);
8616 * Break the TB to be able to take timer interrupts immediately
8617 * after reading count. DISAS_STOP isn't sufficient, we need to
8618 * ensure we break completely out of translated code.
8620 gen_save_pc(ctx
->base
.pc_next
+ 4);
8621 ctx
->base
.is_jmp
= DISAS_EXIT
;
8622 register_name
= "Count";
8624 case CP0_REG09__SAARI
:
8625 CP0_CHECK(ctx
->saar
);
8626 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SAARI
));
8627 register_name
= "SAARI";
8629 case CP0_REG09__SAAR
:
8630 CP0_CHECK(ctx
->saar
);
8631 gen_helper_dmfc0_saar(arg
, cpu_env
);
8632 register_name
= "SAAR";
8635 goto cp0_unimplemented
;
8638 case CP0_REGISTER_10
:
8640 case CP0_REG10__ENTRYHI
:
8641 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EntryHi
));
8642 register_name
= "EntryHi";
8645 goto cp0_unimplemented
;
8648 case CP0_REGISTER_11
:
8650 case CP0_REG11__COMPARE
:
8651 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Compare
));
8652 register_name
= "Compare";
8654 /* 6,7 are implementation dependent */
8656 goto cp0_unimplemented
;
8659 case CP0_REGISTER_12
:
8661 case CP0_REG12__STATUS
:
8662 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Status
));
8663 register_name
= "Status";
8665 case CP0_REG12__INTCTL
:
8666 check_insn(ctx
, ISA_MIPS_R2
);
8667 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_IntCtl
));
8668 register_name
= "IntCtl";
8670 case CP0_REG12__SRSCTL
:
8671 check_insn(ctx
, ISA_MIPS_R2
);
8672 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSCtl
));
8673 register_name
= "SRSCtl";
8675 case CP0_REG12__SRSMAP
:
8676 check_insn(ctx
, ISA_MIPS_R2
);
8677 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
8678 register_name
= "SRSMap";
8681 goto cp0_unimplemented
;
8684 case CP0_REGISTER_13
:
8686 case CP0_REG13__CAUSE
:
8687 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Cause
));
8688 register_name
= "Cause";
8691 goto cp0_unimplemented
;
8694 case CP0_REGISTER_14
:
8696 case CP0_REG14__EPC
:
8697 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
8698 register_name
= "EPC";
8701 goto cp0_unimplemented
;
8704 case CP0_REGISTER_15
:
8706 case CP0_REG15__PRID
:
8707 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_PRid
));
8708 register_name
= "PRid";
8710 case CP0_REG15__EBASE
:
8711 check_insn(ctx
, ISA_MIPS_R2
);
8712 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EBase
));
8713 register_name
= "EBase";
8715 case CP0_REG15__CMGCRBASE
:
8716 check_insn(ctx
, ISA_MIPS_R2
);
8717 CP0_CHECK(ctx
->cmgcr
);
8718 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_CMGCRBase
));
8719 register_name
= "CMGCRBase";
8722 goto cp0_unimplemented
;
8725 case CP0_REGISTER_16
:
8727 case CP0_REG16__CONFIG
:
8728 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config0
));
8729 register_name
= "Config";
8731 case CP0_REG16__CONFIG1
:
8732 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config1
));
8733 register_name
= "Config1";
8735 case CP0_REG16__CONFIG2
:
8736 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config2
));
8737 register_name
= "Config2";
8739 case CP0_REG16__CONFIG3
:
8740 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config3
));
8741 register_name
= "Config3";
8743 case CP0_REG16__CONFIG4
:
8744 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config4
));
8745 register_name
= "Config4";
8747 case CP0_REG16__CONFIG5
:
8748 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config5
));
8749 register_name
= "Config5";
8751 /* 6,7 are implementation dependent */
8752 case CP0_REG16__CONFIG6
:
8753 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config6
));
8754 register_name
= "Config6";
8756 case CP0_REG16__CONFIG7
:
8757 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Config7
));
8758 register_name
= "Config7";
8761 goto cp0_unimplemented
;
8764 case CP0_REGISTER_17
:
8766 case CP0_REG17__LLADDR
:
8767 gen_helper_dmfc0_lladdr(arg
, cpu_env
);
8768 register_name
= "LLAddr";
8770 case CP0_REG17__MAAR
:
8771 CP0_CHECK(ctx
->mrp
);
8772 gen_helper_dmfc0_maar(arg
, cpu_env
);
8773 register_name
= "MAAR";
8775 case CP0_REG17__MAARI
:
8776 CP0_CHECK(ctx
->mrp
);
8777 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MAARI
));
8778 register_name
= "MAARI";
8781 goto cp0_unimplemented
;
8784 case CP0_REGISTER_18
:
8786 case CP0_REG18__WATCHLO0
:
8787 case CP0_REG18__WATCHLO1
:
8788 case CP0_REG18__WATCHLO2
:
8789 case CP0_REG18__WATCHLO3
:
8790 case CP0_REG18__WATCHLO4
:
8791 case CP0_REG18__WATCHLO5
:
8792 case CP0_REG18__WATCHLO6
:
8793 case CP0_REG18__WATCHLO7
:
8794 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8795 gen_helper_1e0i(dmfc0_watchlo
, arg
, sel
);
8796 register_name
= "WatchLo";
8799 goto cp0_unimplemented
;
8802 case CP0_REGISTER_19
:
8804 case CP0_REG19__WATCHHI0
:
8805 case CP0_REG19__WATCHHI1
:
8806 case CP0_REG19__WATCHHI2
:
8807 case CP0_REG19__WATCHHI3
:
8808 case CP0_REG19__WATCHHI4
:
8809 case CP0_REG19__WATCHHI5
:
8810 case CP0_REG19__WATCHHI6
:
8811 case CP0_REG19__WATCHHI7
:
8812 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
8813 gen_helper_1e0i(dmfc0_watchhi
, arg
, sel
);
8814 register_name
= "WatchHi";
8817 goto cp0_unimplemented
;
8820 case CP0_REGISTER_20
:
8822 case CP0_REG20__XCONTEXT
:
8823 check_insn(ctx
, ISA_MIPS3
);
8824 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_XContext
));
8825 register_name
= "XContext";
8828 goto cp0_unimplemented
;
8831 case CP0_REGISTER_21
:
8832 /* Officially reserved, but sel 0 is used for R1x000 framemask */
8833 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
8836 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Framemask
));
8837 register_name
= "Framemask";
8840 goto cp0_unimplemented
;
8843 case CP0_REGISTER_22
:
8844 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8845 register_name
= "'Diagnostic"; /* implementation dependent */
8847 case CP0_REGISTER_23
:
8849 case CP0_REG23__DEBUG
:
8850 gen_helper_mfc0_debug(arg
, cpu_env
); /* EJTAG support */
8851 register_name
= "Debug";
8853 case CP0_REG23__TRACECONTROL
:
8854 /* PDtrace support */
8855 /* gen_helper_dmfc0_tracecontrol(arg, cpu_env); */
8856 register_name
= "TraceControl";
8857 goto cp0_unimplemented
;
8858 case CP0_REG23__TRACECONTROL2
:
8859 /* PDtrace support */
8860 /* gen_helper_dmfc0_tracecontrol2(arg, cpu_env); */
8861 register_name
= "TraceControl2";
8862 goto cp0_unimplemented
;
8863 case CP0_REG23__USERTRACEDATA1
:
8864 /* PDtrace support */
8865 /* gen_helper_dmfc0_usertracedata1(arg, cpu_env);*/
8866 register_name
= "UserTraceData1";
8867 goto cp0_unimplemented
;
8868 case CP0_REG23__TRACEIBPC
:
8869 /* PDtrace support */
8870 /* gen_helper_dmfc0_traceibpc(arg, cpu_env); */
8871 register_name
= "TraceIBPC";
8872 goto cp0_unimplemented
;
8873 case CP0_REG23__TRACEDBPC
:
8874 /* PDtrace support */
8875 /* gen_helper_dmfc0_tracedbpc(arg, cpu_env); */
8876 register_name
= "TraceDBPC";
8877 goto cp0_unimplemented
;
8879 goto cp0_unimplemented
;
8882 case CP0_REGISTER_24
:
8884 case CP0_REG24__DEPC
:
8886 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
8887 register_name
= "DEPC";
8890 goto cp0_unimplemented
;
8893 case CP0_REGISTER_25
:
8895 case CP0_REG25__PERFCTL0
:
8896 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_Performance0
));
8897 register_name
= "Performance0";
8899 case CP0_REG25__PERFCNT0
:
8900 /* gen_helper_dmfc0_performance1(arg); */
8901 register_name
= "Performance1";
8902 goto cp0_unimplemented
;
8903 case CP0_REG25__PERFCTL1
:
8904 /* gen_helper_dmfc0_performance2(arg); */
8905 register_name
= "Performance2";
8906 goto cp0_unimplemented
;
8907 case CP0_REG25__PERFCNT1
:
8908 /* gen_helper_dmfc0_performance3(arg); */
8909 register_name
= "Performance3";
8910 goto cp0_unimplemented
;
8911 case CP0_REG25__PERFCTL2
:
8912 /* gen_helper_dmfc0_performance4(arg); */
8913 register_name
= "Performance4";
8914 goto cp0_unimplemented
;
8915 case CP0_REG25__PERFCNT2
:
8916 /* gen_helper_dmfc0_performance5(arg); */
8917 register_name
= "Performance5";
8918 goto cp0_unimplemented
;
8919 case CP0_REG25__PERFCTL3
:
8920 /* gen_helper_dmfc0_performance6(arg); */
8921 register_name
= "Performance6";
8922 goto cp0_unimplemented
;
8923 case CP0_REG25__PERFCNT3
:
8924 /* gen_helper_dmfc0_performance7(arg); */
8925 register_name
= "Performance7";
8926 goto cp0_unimplemented
;
8928 goto cp0_unimplemented
;
8931 case CP0_REGISTER_26
:
8933 case CP0_REG26__ERRCTL
:
8934 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_ErrCtl
));
8935 register_name
= "ErrCtl";
8938 goto cp0_unimplemented
;
8941 case CP0_REGISTER_27
:
8944 case CP0_REG27__CACHERR
:
8945 tcg_gen_movi_tl(arg
, 0); /* unimplemented */
8946 register_name
= "CacheErr";
8949 goto cp0_unimplemented
;
8952 case CP0_REGISTER_28
:
8954 case CP0_REG28__TAGLO
:
8955 case CP0_REG28__TAGLO1
:
8956 case CP0_REG28__TAGLO2
:
8957 case CP0_REG28__TAGLO3
:
8958 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagLo
));
8959 register_name
= "TagLo";
8961 case CP0_REG28__DATALO
:
8962 case CP0_REG28__DATALO1
:
8963 case CP0_REG28__DATALO2
:
8964 case CP0_REG28__DATALO3
:
8965 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataLo
));
8966 register_name
= "DataLo";
8969 goto cp0_unimplemented
;
8972 case CP0_REGISTER_29
:
8974 case CP0_REG29__TAGHI
:
8975 case CP0_REG29__TAGHI1
:
8976 case CP0_REG29__TAGHI2
:
8977 case CP0_REG29__TAGHI3
:
8978 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_TagHi
));
8979 register_name
= "TagHi";
8981 case CP0_REG29__DATAHI
:
8982 case CP0_REG29__DATAHI1
:
8983 case CP0_REG29__DATAHI2
:
8984 case CP0_REG29__DATAHI3
:
8985 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DataHi
));
8986 register_name
= "DataHi";
8989 goto cp0_unimplemented
;
8992 case CP0_REGISTER_30
:
8994 case CP0_REG30__ERROREPC
:
8995 tcg_gen_ld_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
8996 register_name
= "ErrorEPC";
8999 goto cp0_unimplemented
;
9002 case CP0_REGISTER_31
:
9004 case CP0_REG31__DESAVE
:
9006 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
9007 register_name
= "DESAVE";
9009 case CP0_REG31__KSCRATCH1
:
9010 case CP0_REG31__KSCRATCH2
:
9011 case CP0_REG31__KSCRATCH3
:
9012 case CP0_REG31__KSCRATCH4
:
9013 case CP0_REG31__KSCRATCH5
:
9014 case CP0_REG31__KSCRATCH6
:
9015 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
9016 tcg_gen_ld_tl(arg
, cpu_env
,
9017 offsetof(CPUMIPSState
, CP0_KScratch
[sel
- 2]));
9018 register_name
= "KScratch";
9021 goto cp0_unimplemented
;
9025 goto cp0_unimplemented
;
9027 trace_mips_translate_c0("dmfc0", register_name
, reg
, sel
);
9031 qemu_log_mask(LOG_UNIMP
, "dmfc0 %s (reg %d sel %d)\n",
9032 register_name
, reg
, sel
);
9033 gen_mfc0_unimplemented(ctx
, arg
);
9036 static void gen_dmtc0(DisasContext
*ctx
, TCGv arg
, int reg
, int sel
)
9038 const char *register_name
= "invalid";
9041 check_insn(ctx
, ISA_MIPS_R1
);
9044 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
9049 case CP0_REGISTER_00
:
9051 case CP0_REG00__INDEX
:
9052 gen_helper_mtc0_index(cpu_env
, arg
);
9053 register_name
= "Index";
9055 case CP0_REG00__MVPCONTROL
:
9056 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9057 gen_helper_mtc0_mvpcontrol(cpu_env
, arg
);
9058 register_name
= "MVPControl";
9060 case CP0_REG00__MVPCONF0
:
9061 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9063 register_name
= "MVPConf0";
9065 case CP0_REG00__MVPCONF1
:
9066 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9068 register_name
= "MVPConf1";
9070 case CP0_REG00__VPCONTROL
:
9073 register_name
= "VPControl";
9076 goto cp0_unimplemented
;
9079 case CP0_REGISTER_01
:
9081 case CP0_REG01__RANDOM
:
9083 register_name
= "Random";
9085 case CP0_REG01__VPECONTROL
:
9086 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9087 gen_helper_mtc0_vpecontrol(cpu_env
, arg
);
9088 register_name
= "VPEControl";
9090 case CP0_REG01__VPECONF0
:
9091 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9092 gen_helper_mtc0_vpeconf0(cpu_env
, arg
);
9093 register_name
= "VPEConf0";
9095 case CP0_REG01__VPECONF1
:
9096 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9097 gen_helper_mtc0_vpeconf1(cpu_env
, arg
);
9098 register_name
= "VPEConf1";
9100 case CP0_REG01__YQMASK
:
9101 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9102 gen_helper_mtc0_yqmask(cpu_env
, arg
);
9103 register_name
= "YQMask";
9105 case CP0_REG01__VPESCHEDULE
:
9106 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9107 tcg_gen_st_tl(arg
, cpu_env
,
9108 offsetof(CPUMIPSState
, CP0_VPESchedule
));
9109 register_name
= "VPESchedule";
9111 case CP0_REG01__VPESCHEFBACK
:
9112 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9113 tcg_gen_st_tl(arg
, cpu_env
,
9114 offsetof(CPUMIPSState
, CP0_VPEScheFBack
));
9115 register_name
= "VPEScheFBack";
9117 case CP0_REG01__VPEOPT
:
9118 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9119 gen_helper_mtc0_vpeopt(cpu_env
, arg
);
9120 register_name
= "VPEOpt";
9123 goto cp0_unimplemented
;
9126 case CP0_REGISTER_02
:
9128 case CP0_REG02__ENTRYLO0
:
9129 gen_helper_dmtc0_entrylo0(cpu_env
, arg
);
9130 register_name
= "EntryLo0";
9132 case CP0_REG02__TCSTATUS
:
9133 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9134 gen_helper_mtc0_tcstatus(cpu_env
, arg
);
9135 register_name
= "TCStatus";
9137 case CP0_REG02__TCBIND
:
9138 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9139 gen_helper_mtc0_tcbind(cpu_env
, arg
);
9140 register_name
= "TCBind";
9142 case CP0_REG02__TCRESTART
:
9143 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9144 gen_helper_mtc0_tcrestart(cpu_env
, arg
);
9145 register_name
= "TCRestart";
9147 case CP0_REG02__TCHALT
:
9148 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9149 gen_helper_mtc0_tchalt(cpu_env
, arg
);
9150 register_name
= "TCHalt";
9152 case CP0_REG02__TCCONTEXT
:
9153 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9154 gen_helper_mtc0_tccontext(cpu_env
, arg
);
9155 register_name
= "TCContext";
9157 case CP0_REG02__TCSCHEDULE
:
9158 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9159 gen_helper_mtc0_tcschedule(cpu_env
, arg
);
9160 register_name
= "TCSchedule";
9162 case CP0_REG02__TCSCHEFBACK
:
9163 CP0_CHECK(ctx
->insn_flags
& ASE_MT
);
9164 gen_helper_mtc0_tcschefback(cpu_env
, arg
);
9165 register_name
= "TCScheFBack";
9168 goto cp0_unimplemented
;
9171 case CP0_REGISTER_03
:
9173 case CP0_REG03__ENTRYLO1
:
9174 gen_helper_dmtc0_entrylo1(cpu_env
, arg
);
9175 register_name
= "EntryLo1";
9177 case CP0_REG03__GLOBALNUM
:
9180 register_name
= "GlobalNumber";
9183 goto cp0_unimplemented
;
9186 case CP0_REGISTER_04
:
9188 case CP0_REG04__CONTEXT
:
9189 gen_helper_mtc0_context(cpu_env
, arg
);
9190 register_name
= "Context";
9192 case CP0_REG04__CONTEXTCONFIG
:
9194 /* gen_helper_dmtc0_contextconfig(arg); */
9195 register_name
= "ContextConfig";
9196 goto cp0_unimplemented
;
9197 case CP0_REG04__USERLOCAL
:
9198 CP0_CHECK(ctx
->ulri
);
9199 tcg_gen_st_tl(arg
, cpu_env
,
9200 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
9201 register_name
= "UserLocal";
9203 case CP0_REG04__MMID
:
9205 gen_mfc0_load32(arg
, offsetof(CPUMIPSState
, CP0_MemoryMapID
));
9206 register_name
= "MMID";
9209 goto cp0_unimplemented
;
9212 case CP0_REGISTER_05
:
9214 case CP0_REG05__PAGEMASK
:
9215 gen_helper_mtc0_pagemask(cpu_env
, arg
);
9216 register_name
= "PageMask";
9218 case CP0_REG05__PAGEGRAIN
:
9219 check_insn(ctx
, ISA_MIPS_R2
);
9220 gen_helper_mtc0_pagegrain(cpu_env
, arg
);
9221 register_name
= "PageGrain";
9223 case CP0_REG05__SEGCTL0
:
9225 gen_helper_mtc0_segctl0(cpu_env
, arg
);
9226 register_name
= "SegCtl0";
9228 case CP0_REG05__SEGCTL1
:
9230 gen_helper_mtc0_segctl1(cpu_env
, arg
);
9231 register_name
= "SegCtl1";
9233 case CP0_REG05__SEGCTL2
:
9235 gen_helper_mtc0_segctl2(cpu_env
, arg
);
9236 register_name
= "SegCtl2";
9238 case CP0_REG05__PWBASE
:
9240 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_PWBase
));
9241 register_name
= "PWBase";
9243 case CP0_REG05__PWFIELD
:
9245 gen_helper_mtc0_pwfield(cpu_env
, arg
);
9246 register_name
= "PWField";
9248 case CP0_REG05__PWSIZE
:
9250 gen_helper_mtc0_pwsize(cpu_env
, arg
);
9251 register_name
= "PWSize";
9254 goto cp0_unimplemented
;
9257 case CP0_REGISTER_06
:
9259 case CP0_REG06__WIRED
:
9260 gen_helper_mtc0_wired(cpu_env
, arg
);
9261 register_name
= "Wired";
9263 case CP0_REG06__SRSCONF0
:
9264 check_insn(ctx
, ISA_MIPS_R2
);
9265 gen_helper_mtc0_srsconf0(cpu_env
, arg
);
9266 register_name
= "SRSConf0";
9268 case CP0_REG06__SRSCONF1
:
9269 check_insn(ctx
, ISA_MIPS_R2
);
9270 gen_helper_mtc0_srsconf1(cpu_env
, arg
);
9271 register_name
= "SRSConf1";
9273 case CP0_REG06__SRSCONF2
:
9274 check_insn(ctx
, ISA_MIPS_R2
);
9275 gen_helper_mtc0_srsconf2(cpu_env
, arg
);
9276 register_name
= "SRSConf2";
9278 case CP0_REG06__SRSCONF3
:
9279 check_insn(ctx
, ISA_MIPS_R2
);
9280 gen_helper_mtc0_srsconf3(cpu_env
, arg
);
9281 register_name
= "SRSConf3";
9283 case CP0_REG06__SRSCONF4
:
9284 check_insn(ctx
, ISA_MIPS_R2
);
9285 gen_helper_mtc0_srsconf4(cpu_env
, arg
);
9286 register_name
= "SRSConf4";
9288 case CP0_REG06__PWCTL
:
9290 gen_helper_mtc0_pwctl(cpu_env
, arg
);
9291 register_name
= "PWCtl";
9294 goto cp0_unimplemented
;
9297 case CP0_REGISTER_07
:
9299 case CP0_REG07__HWRENA
:
9300 check_insn(ctx
, ISA_MIPS_R2
);
9301 gen_helper_mtc0_hwrena(cpu_env
, arg
);
9302 ctx
->base
.is_jmp
= DISAS_STOP
;
9303 register_name
= "HWREna";
9306 goto cp0_unimplemented
;
9309 case CP0_REGISTER_08
:
9311 case CP0_REG08__BADVADDR
:
9313 register_name
= "BadVAddr";
9315 case CP0_REG08__BADINSTR
:
9317 register_name
= "BadInstr";
9319 case CP0_REG08__BADINSTRP
:
9321 register_name
= "BadInstrP";
9323 case CP0_REG08__BADINSTRX
:
9325 register_name
= "BadInstrX";
9328 goto cp0_unimplemented
;
9331 case CP0_REGISTER_09
:
9333 case CP0_REG09__COUNT
:
9334 gen_helper_mtc0_count(cpu_env
, arg
);
9335 register_name
= "Count";
9337 case CP0_REG09__SAARI
:
9338 CP0_CHECK(ctx
->saar
);
9339 gen_helper_mtc0_saari(cpu_env
, arg
);
9340 register_name
= "SAARI";
9342 case CP0_REG09__SAAR
:
9343 CP0_CHECK(ctx
->saar
);
9344 gen_helper_mtc0_saar(cpu_env
, arg
);
9345 register_name
= "SAAR";
9348 goto cp0_unimplemented
;
9350 /* Stop translation as we may have switched the execution mode */
9351 ctx
->base
.is_jmp
= DISAS_STOP
;
9353 case CP0_REGISTER_10
:
9355 case CP0_REG10__ENTRYHI
:
9356 gen_helper_mtc0_entryhi(cpu_env
, arg
);
9357 register_name
= "EntryHi";
9360 goto cp0_unimplemented
;
9363 case CP0_REGISTER_11
:
9365 case CP0_REG11__COMPARE
:
9366 gen_helper_mtc0_compare(cpu_env
, arg
);
9367 register_name
= "Compare";
9369 /* 6,7 are implementation dependent */
9371 goto cp0_unimplemented
;
9373 /* Stop translation as we may have switched the execution mode */
9374 ctx
->base
.is_jmp
= DISAS_STOP
;
9376 case CP0_REGISTER_12
:
9378 case CP0_REG12__STATUS
:
9379 save_cpu_state(ctx
, 1);
9380 gen_helper_mtc0_status(cpu_env
, arg
);
9381 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9382 gen_save_pc(ctx
->base
.pc_next
+ 4);
9383 ctx
->base
.is_jmp
= DISAS_EXIT
;
9384 register_name
= "Status";
9386 case CP0_REG12__INTCTL
:
9387 check_insn(ctx
, ISA_MIPS_R2
);
9388 gen_helper_mtc0_intctl(cpu_env
, arg
);
9389 /* Stop translation as we may have switched the execution mode */
9390 ctx
->base
.is_jmp
= DISAS_STOP
;
9391 register_name
= "IntCtl";
9393 case CP0_REG12__SRSCTL
:
9394 check_insn(ctx
, ISA_MIPS_R2
);
9395 gen_helper_mtc0_srsctl(cpu_env
, arg
);
9396 /* Stop translation as we may have switched the execution mode */
9397 ctx
->base
.is_jmp
= DISAS_STOP
;
9398 register_name
= "SRSCtl";
9400 case CP0_REG12__SRSMAP
:
9401 check_insn(ctx
, ISA_MIPS_R2
);
9402 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_SRSMap
));
9403 /* Stop translation as we may have switched the execution mode */
9404 ctx
->base
.is_jmp
= DISAS_STOP
;
9405 register_name
= "SRSMap";
9408 goto cp0_unimplemented
;
9411 case CP0_REGISTER_13
:
9413 case CP0_REG13__CAUSE
:
9414 save_cpu_state(ctx
, 1);
9415 gen_helper_mtc0_cause(cpu_env
, arg
);
9417 * Stop translation as we may have triggered an interrupt.
9418 * DISAS_STOP isn't sufficient, we need to ensure we break out of
9419 * translated code to check for pending interrupts.
9421 gen_save_pc(ctx
->base
.pc_next
+ 4);
9422 ctx
->base
.is_jmp
= DISAS_EXIT
;
9423 register_name
= "Cause";
9426 goto cp0_unimplemented
;
9429 case CP0_REGISTER_14
:
9431 case CP0_REG14__EPC
:
9432 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_EPC
));
9433 register_name
= "EPC";
9436 goto cp0_unimplemented
;
9439 case CP0_REGISTER_15
:
9441 case CP0_REG15__PRID
:
9443 register_name
= "PRid";
9445 case CP0_REG15__EBASE
:
9446 check_insn(ctx
, ISA_MIPS_R2
);
9447 gen_helper_mtc0_ebase(cpu_env
, arg
);
9448 register_name
= "EBase";
9451 goto cp0_unimplemented
;
9454 case CP0_REGISTER_16
:
9456 case CP0_REG16__CONFIG
:
9457 gen_helper_mtc0_config0(cpu_env
, arg
);
9458 register_name
= "Config";
9459 /* Stop translation as we may have switched the execution mode */
9460 ctx
->base
.is_jmp
= DISAS_STOP
;
9462 case CP0_REG16__CONFIG1
:
9463 /* ignored, read only */
9464 register_name
= "Config1";
9466 case CP0_REG16__CONFIG2
:
9467 gen_helper_mtc0_config2(cpu_env
, arg
);
9468 register_name
= "Config2";
9469 /* Stop translation as we may have switched the execution mode */
9470 ctx
->base
.is_jmp
= DISAS_STOP
;
9472 case CP0_REG16__CONFIG3
:
9473 gen_helper_mtc0_config3(cpu_env
, arg
);
9474 register_name
= "Config3";
9475 /* Stop translation as we may have switched the execution mode */
9476 ctx
->base
.is_jmp
= DISAS_STOP
;
9478 case CP0_REG16__CONFIG4
:
9479 /* currently ignored */
9480 register_name
= "Config4";
9482 case CP0_REG16__CONFIG5
:
9483 gen_helper_mtc0_config5(cpu_env
, arg
);
9484 register_name
= "Config5";
9485 /* Stop translation as we may have switched the execution mode */
9486 ctx
->base
.is_jmp
= DISAS_STOP
;
9488 /* 6,7 are implementation dependent */
9490 register_name
= "Invalid config selector";
9491 goto cp0_unimplemented
;
9494 case CP0_REGISTER_17
:
9496 case CP0_REG17__LLADDR
:
9497 gen_helper_mtc0_lladdr(cpu_env
, arg
);
9498 register_name
= "LLAddr";
9500 case CP0_REG17__MAAR
:
9501 CP0_CHECK(ctx
->mrp
);
9502 gen_helper_mtc0_maar(cpu_env
, arg
);
9503 register_name
= "MAAR";
9505 case CP0_REG17__MAARI
:
9506 CP0_CHECK(ctx
->mrp
);
9507 gen_helper_mtc0_maari(cpu_env
, arg
);
9508 register_name
= "MAARI";
9511 goto cp0_unimplemented
;
9514 case CP0_REGISTER_18
:
9516 case CP0_REG18__WATCHLO0
:
9517 case CP0_REG18__WATCHLO1
:
9518 case CP0_REG18__WATCHLO2
:
9519 case CP0_REG18__WATCHLO3
:
9520 case CP0_REG18__WATCHLO4
:
9521 case CP0_REG18__WATCHLO5
:
9522 case CP0_REG18__WATCHLO6
:
9523 case CP0_REG18__WATCHLO7
:
9524 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9525 gen_helper_0e1i(mtc0_watchlo
, arg
, sel
);
9526 register_name
= "WatchLo";
9529 goto cp0_unimplemented
;
9532 case CP0_REGISTER_19
:
9534 case CP0_REG19__WATCHHI0
:
9535 case CP0_REG19__WATCHHI1
:
9536 case CP0_REG19__WATCHHI2
:
9537 case CP0_REG19__WATCHHI3
:
9538 case CP0_REG19__WATCHHI4
:
9539 case CP0_REG19__WATCHHI5
:
9540 case CP0_REG19__WATCHHI6
:
9541 case CP0_REG19__WATCHHI7
:
9542 CP0_CHECK(ctx
->CP0_Config1
& (1 << CP0C1_WR
));
9543 gen_helper_0e1i(mtc0_watchhi
, arg
, sel
);
9544 register_name
= "WatchHi";
9547 goto cp0_unimplemented
;
9550 case CP0_REGISTER_20
:
9552 case CP0_REG20__XCONTEXT
:
9553 check_insn(ctx
, ISA_MIPS3
);
9554 gen_helper_mtc0_xcontext(cpu_env
, arg
);
9555 register_name
= "XContext";
9558 goto cp0_unimplemented
;
9561 case CP0_REGISTER_21
:
9562 /* Officially reserved, but sel 0 is used for R1x000 framemask */
9563 CP0_CHECK(!(ctx
->insn_flags
& ISA_MIPS_R6
));
9566 gen_helper_mtc0_framemask(cpu_env
, arg
);
9567 register_name
= "Framemask";
9570 goto cp0_unimplemented
;
9573 case CP0_REGISTER_22
:
9575 register_name
= "Diagnostic"; /* implementation dependent */
9577 case CP0_REGISTER_23
:
9579 case CP0_REG23__DEBUG
:
9580 gen_helper_mtc0_debug(cpu_env
, arg
); /* EJTAG support */
9581 /* DISAS_STOP isn't good enough here, hflags may have changed. */
9582 gen_save_pc(ctx
->base
.pc_next
+ 4);
9583 ctx
->base
.is_jmp
= DISAS_EXIT
;
9584 register_name
= "Debug";
9586 case CP0_REG23__TRACECONTROL
:
9587 /* PDtrace support */
9588 /* gen_helper_mtc0_tracecontrol(cpu_env, arg); */
9589 /* Stop translation as we may have switched the execution mode */
9590 ctx
->base
.is_jmp
= DISAS_STOP
;
9591 register_name
= "TraceControl";
9592 goto cp0_unimplemented
;
9593 case CP0_REG23__TRACECONTROL2
:
9594 /* PDtrace support */
9595 /* gen_helper_mtc0_tracecontrol2(cpu_env, arg); */
9596 /* Stop translation as we may have switched the execution mode */
9597 ctx
->base
.is_jmp
= DISAS_STOP
;
9598 register_name
= "TraceControl2";
9599 goto cp0_unimplemented
;
9600 case CP0_REG23__USERTRACEDATA1
:
9601 /* PDtrace support */
9602 /* gen_helper_mtc0_usertracedata1(cpu_env, arg);*/
9603 /* Stop translation as we may have switched the execution mode */
9604 ctx
->base
.is_jmp
= DISAS_STOP
;
9605 register_name
= "UserTraceData1";
9606 goto cp0_unimplemented
;
9607 case CP0_REG23__TRACEIBPC
:
9608 /* PDtrace support */
9609 /* gen_helper_mtc0_traceibpc(cpu_env, arg); */
9610 /* Stop translation as we may have switched the execution mode */
9611 ctx
->base
.is_jmp
= DISAS_STOP
;
9612 register_name
= "TraceIBPC";
9613 goto cp0_unimplemented
;
9614 case CP0_REG23__TRACEDBPC
:
9615 /* PDtrace support */
9616 /* gen_helper_mtc0_tracedbpc(cpu_env, arg); */
9617 /* Stop translation as we may have switched the execution mode */
9618 ctx
->base
.is_jmp
= DISAS_STOP
;
9619 register_name
= "TraceDBPC";
9620 goto cp0_unimplemented
;
9622 goto cp0_unimplemented
;
9625 case CP0_REGISTER_24
:
9627 case CP0_REG24__DEPC
:
9629 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_DEPC
));
9630 register_name
= "DEPC";
9633 goto cp0_unimplemented
;
9636 case CP0_REGISTER_25
:
9638 case CP0_REG25__PERFCTL0
:
9639 gen_helper_mtc0_performance0(cpu_env
, arg
);
9640 register_name
= "Performance0";
9642 case CP0_REG25__PERFCNT0
:
9643 /* gen_helper_mtc0_performance1(cpu_env, arg); */
9644 register_name
= "Performance1";
9645 goto cp0_unimplemented
;
9646 case CP0_REG25__PERFCTL1
:
9647 /* gen_helper_mtc0_performance2(cpu_env, arg); */
9648 register_name
= "Performance2";
9649 goto cp0_unimplemented
;
9650 case CP0_REG25__PERFCNT1
:
9651 /* gen_helper_mtc0_performance3(cpu_env, arg); */
9652 register_name
= "Performance3";
9653 goto cp0_unimplemented
;
9654 case CP0_REG25__PERFCTL2
:
9655 /* gen_helper_mtc0_performance4(cpu_env, arg); */
9656 register_name
= "Performance4";
9657 goto cp0_unimplemented
;
9658 case CP0_REG25__PERFCNT2
:
9659 /* gen_helper_mtc0_performance5(cpu_env, arg); */
9660 register_name
= "Performance5";
9661 goto cp0_unimplemented
;
9662 case CP0_REG25__PERFCTL3
:
9663 /* gen_helper_mtc0_performance6(cpu_env, arg); */
9664 register_name
= "Performance6";
9665 goto cp0_unimplemented
;
9666 case CP0_REG25__PERFCNT3
:
9667 /* gen_helper_mtc0_performance7(cpu_env, arg); */
9668 register_name
= "Performance7";
9669 goto cp0_unimplemented
;
9671 goto cp0_unimplemented
;
9674 case CP0_REGISTER_26
:
9676 case CP0_REG26__ERRCTL
:
9677 gen_helper_mtc0_errctl(cpu_env
, arg
);
9678 ctx
->base
.is_jmp
= DISAS_STOP
;
9679 register_name
= "ErrCtl";
9682 goto cp0_unimplemented
;
9685 case CP0_REGISTER_27
:
9687 case CP0_REG27__CACHERR
:
9689 register_name
= "CacheErr";
9692 goto cp0_unimplemented
;
9695 case CP0_REGISTER_28
:
9697 case CP0_REG28__TAGLO
:
9698 case CP0_REG28__TAGLO1
:
9699 case CP0_REG28__TAGLO2
:
9700 case CP0_REG28__TAGLO3
:
9701 gen_helper_mtc0_taglo(cpu_env
, arg
);
9702 register_name
= "TagLo";
9704 case CP0_REG28__DATALO
:
9705 case CP0_REG28__DATALO1
:
9706 case CP0_REG28__DATALO2
:
9707 case CP0_REG28__DATALO3
:
9708 gen_helper_mtc0_datalo(cpu_env
, arg
);
9709 register_name
= "DataLo";
9712 goto cp0_unimplemented
;
9715 case CP0_REGISTER_29
:
9717 case CP0_REG29__TAGHI
:
9718 case CP0_REG29__TAGHI1
:
9719 case CP0_REG29__TAGHI2
:
9720 case CP0_REG29__TAGHI3
:
9721 gen_helper_mtc0_taghi(cpu_env
, arg
);
9722 register_name
= "TagHi";
9724 case CP0_REG29__DATAHI
:
9725 case CP0_REG29__DATAHI1
:
9726 case CP0_REG29__DATAHI2
:
9727 case CP0_REG29__DATAHI3
:
9728 gen_helper_mtc0_datahi(cpu_env
, arg
);
9729 register_name
= "DataHi";
9732 register_name
= "invalid sel";
9733 goto cp0_unimplemented
;
9736 case CP0_REGISTER_30
:
9738 case CP0_REG30__ERROREPC
:
9739 tcg_gen_st_tl(arg
, cpu_env
, offsetof(CPUMIPSState
, CP0_ErrorEPC
));
9740 register_name
= "ErrorEPC";
9743 goto cp0_unimplemented
;
9746 case CP0_REGISTER_31
:
9748 case CP0_REG31__DESAVE
:
9750 gen_mtc0_store32(arg
, offsetof(CPUMIPSState
, CP0_DESAVE
));
9751 register_name
= "DESAVE";
9753 case CP0_REG31__KSCRATCH1
:
9754 case CP0_REG31__KSCRATCH2
:
9755 case CP0_REG31__KSCRATCH3
:
9756 case CP0_REG31__KSCRATCH4
:
9757 case CP0_REG31__KSCRATCH5
:
9758 case CP0_REG31__KSCRATCH6
:
9759 CP0_CHECK(ctx
->kscrexist
& (1 << sel
));
9760 tcg_gen_st_tl(arg
, cpu_env
,
9761 offsetof(CPUMIPSState
, CP0_KScratch
[sel
- 2]));
9762 register_name
= "KScratch";
9765 goto cp0_unimplemented
;
9769 goto cp0_unimplemented
;
9771 trace_mips_translate_c0("dmtc0", register_name
, reg
, sel
);
9773 /* For simplicity assume that all writes can cause interrupts. */
9774 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
9776 * DISAS_STOP isn't sufficient, we need to ensure we break out of
9777 * translated code to check for pending interrupts.
9779 gen_save_pc(ctx
->base
.pc_next
+ 4);
9780 ctx
->base
.is_jmp
= DISAS_EXIT
;
9785 qemu_log_mask(LOG_UNIMP
, "dmtc0 %s (reg %d sel %d)\n",
9786 register_name
, reg
, sel
);
9788 #endif /* TARGET_MIPS64 */
9790 static void gen_mftr(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rd
,
9791 int u
, int sel
, int h
)
9793 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
9794 TCGv t0
= tcg_temp_local_new();
9796 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
9797 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
9798 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)))) {
9799 tcg_gen_movi_tl(t0
, -1);
9800 } else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
9801 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
))) {
9802 tcg_gen_movi_tl(t0
, -1);
9803 } else if (u
== 0) {
9808 gen_helper_mftc0_vpecontrol(t0
, cpu_env
);
9811 gen_helper_mftc0_vpeconf0(t0
, cpu_env
);
9821 gen_helper_mftc0_tcstatus(t0
, cpu_env
);
9824 gen_helper_mftc0_tcbind(t0
, cpu_env
);
9827 gen_helper_mftc0_tcrestart(t0
, cpu_env
);
9830 gen_helper_mftc0_tchalt(t0
, cpu_env
);
9833 gen_helper_mftc0_tccontext(t0
, cpu_env
);
9836 gen_helper_mftc0_tcschedule(t0
, cpu_env
);
9839 gen_helper_mftc0_tcschefback(t0
, cpu_env
);
9842 gen_mfc0(ctx
, t0
, rt
, sel
);
9849 gen_helper_mftc0_entryhi(t0
, cpu_env
);
9852 gen_mfc0(ctx
, t0
, rt
, sel
);
9859 gen_helper_mftc0_status(t0
, cpu_env
);
9862 gen_mfc0(ctx
, t0
, rt
, sel
);
9869 gen_helper_mftc0_cause(t0
, cpu_env
);
9879 gen_helper_mftc0_epc(t0
, cpu_env
);
9889 gen_helper_mftc0_ebase(t0
, cpu_env
);
9906 gen_helper_mftc0_configx(t0
, cpu_env
, tcg_const_tl(sel
));
9916 gen_helper_mftc0_debug(t0
, cpu_env
);
9919 gen_mfc0(ctx
, t0
, rt
, sel
);
9924 gen_mfc0(ctx
, t0
, rt
, sel
);
9928 /* GPR registers. */
9930 gen_helper_1e0i(mftgpr
, t0
, rt
);
9932 /* Auxiliary CPU registers */
9936 gen_helper_1e0i(mftlo
, t0
, 0);
9939 gen_helper_1e0i(mfthi
, t0
, 0);
9942 gen_helper_1e0i(mftacx
, t0
, 0);
9945 gen_helper_1e0i(mftlo
, t0
, 1);
9948 gen_helper_1e0i(mfthi
, t0
, 1);
9951 gen_helper_1e0i(mftacx
, t0
, 1);
9954 gen_helper_1e0i(mftlo
, t0
, 2);
9957 gen_helper_1e0i(mfthi
, t0
, 2);
9960 gen_helper_1e0i(mftacx
, t0
, 2);
9963 gen_helper_1e0i(mftlo
, t0
, 3);
9966 gen_helper_1e0i(mfthi
, t0
, 3);
9969 gen_helper_1e0i(mftacx
, t0
, 3);
9972 gen_helper_mftdsp(t0
, cpu_env
);
9978 /* Floating point (COP1). */
9980 /* XXX: For now we support only a single FPU context. */
9982 TCGv_i32 fp0
= tcg_temp_new_i32();
9984 gen_load_fpr32(ctx
, fp0
, rt
);
9985 tcg_gen_ext_i32_tl(t0
, fp0
);
9986 tcg_temp_free_i32(fp0
);
9988 TCGv_i32 fp0
= tcg_temp_new_i32();
9990 gen_load_fpr32h(ctx
, fp0
, rt
);
9991 tcg_gen_ext_i32_tl(t0
, fp0
);
9992 tcg_temp_free_i32(fp0
);
9996 /* XXX: For now we support only a single FPU context. */
9997 gen_helper_1e0i(cfc1
, t0
, rt
);
9999 /* COP2: Not implemented. */
10007 trace_mips_translate_tr("mftr", rt
, u
, sel
, h
);
10008 gen_store_gpr(t0
, rd
);
10014 LOG_DISAS("mftr (reg %d u %d sel %d h %d)\n", rt
, u
, sel
, h
);
10015 gen_reserved_instruction(ctx
);
10018 static void gen_mttr(CPUMIPSState
*env
, DisasContext
*ctx
, int rd
, int rt
,
10019 int u
, int sel
, int h
)
10021 int other_tc
= env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
);
10022 TCGv t0
= tcg_temp_local_new();
10024 gen_load_gpr(t0
, rt
);
10025 if ((env
->CP0_VPEConf0
& (1 << CP0VPEC0_MVP
)) == 0 &&
10026 ((env
->tcs
[other_tc
].CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)) !=
10027 (env
->active_tc
.CP0_TCBind
& (0xf << CP0TCBd_CurVPE
)))) {
10030 } else if ((env
->CP0_VPEControl
& (0xff << CP0VPECo_TargTC
)) >
10031 (env
->mvp
->CP0_MVPConf0
& (0xff << CP0MVPC0_PTC
))) {
10034 } else if (u
== 0) {
10039 gen_helper_mttc0_vpecontrol(cpu_env
, t0
);
10042 gen_helper_mttc0_vpeconf0(cpu_env
, t0
);
10052 gen_helper_mttc0_tcstatus(cpu_env
, t0
);
10055 gen_helper_mttc0_tcbind(cpu_env
, t0
);
10058 gen_helper_mttc0_tcrestart(cpu_env
, t0
);
10061 gen_helper_mttc0_tchalt(cpu_env
, t0
);
10064 gen_helper_mttc0_tccontext(cpu_env
, t0
);
10067 gen_helper_mttc0_tcschedule(cpu_env
, t0
);
10070 gen_helper_mttc0_tcschefback(cpu_env
, t0
);
10073 gen_mtc0(ctx
, t0
, rd
, sel
);
10080 gen_helper_mttc0_entryhi(cpu_env
, t0
);
10083 gen_mtc0(ctx
, t0
, rd
, sel
);
10090 gen_helper_mttc0_status(cpu_env
, t0
);
10093 gen_mtc0(ctx
, t0
, rd
, sel
);
10100 gen_helper_mttc0_cause(cpu_env
, t0
);
10110 gen_helper_mttc0_ebase(cpu_env
, t0
);
10120 gen_helper_mttc0_debug(cpu_env
, t0
);
10123 gen_mtc0(ctx
, t0
, rd
, sel
);
10128 gen_mtc0(ctx
, t0
, rd
, sel
);
10132 /* GPR registers. */
10134 gen_helper_0e1i(mttgpr
, t0
, rd
);
10136 /* Auxiliary CPU registers */
10140 gen_helper_0e1i(mttlo
, t0
, 0);
10143 gen_helper_0e1i(mtthi
, t0
, 0);
10146 gen_helper_0e1i(mttacx
, t0
, 0);
10149 gen_helper_0e1i(mttlo
, t0
, 1);
10152 gen_helper_0e1i(mtthi
, t0
, 1);
10155 gen_helper_0e1i(mttacx
, t0
, 1);
10158 gen_helper_0e1i(mttlo
, t0
, 2);
10161 gen_helper_0e1i(mtthi
, t0
, 2);
10164 gen_helper_0e1i(mttacx
, t0
, 2);
10167 gen_helper_0e1i(mttlo
, t0
, 3);
10170 gen_helper_0e1i(mtthi
, t0
, 3);
10173 gen_helper_0e1i(mttacx
, t0
, 3);
10176 gen_helper_mttdsp(cpu_env
, t0
);
10182 /* Floating point (COP1). */
10184 /* XXX: For now we support only a single FPU context. */
10186 TCGv_i32 fp0
= tcg_temp_new_i32();
10188 tcg_gen_trunc_tl_i32(fp0
, t0
);
10189 gen_store_fpr32(ctx
, fp0
, rd
);
10190 tcg_temp_free_i32(fp0
);
10192 TCGv_i32 fp0
= tcg_temp_new_i32();
10194 tcg_gen_trunc_tl_i32(fp0
, t0
);
10195 gen_store_fpr32h(ctx
, fp0
, rd
);
10196 tcg_temp_free_i32(fp0
);
10200 /* XXX: For now we support only a single FPU context. */
10202 TCGv_i32 fs_tmp
= tcg_const_i32(rd
);
10204 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
10205 tcg_temp_free_i32(fs_tmp
);
10207 /* Stop translation as we may have changed hflags */
10208 ctx
->base
.is_jmp
= DISAS_STOP
;
10210 /* COP2: Not implemented. */
10218 trace_mips_translate_tr("mttr", rd
, u
, sel
, h
);
10224 LOG_DISAS("mttr (reg %d u %d sel %d h %d)\n", rd
, u
, sel
, h
);
10225 gen_reserved_instruction(ctx
);
10228 static void gen_cp0(CPUMIPSState
*env
, DisasContext
*ctx
, uint32_t opc
,
10231 const char *opn
= "ldst";
10233 check_cp0_enabled(ctx
);
10237 /* Treat as NOP. */
10240 gen_mfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10245 TCGv t0
= tcg_temp_new();
10247 gen_load_gpr(t0
, rt
);
10248 gen_mtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10253 #if defined(TARGET_MIPS64)
10255 check_insn(ctx
, ISA_MIPS3
);
10257 /* Treat as NOP. */
10260 gen_dmfc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10264 check_insn(ctx
, ISA_MIPS3
);
10266 TCGv t0
= tcg_temp_new();
10268 gen_load_gpr(t0
, rt
);
10269 gen_dmtc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10278 /* Treat as NOP. */
10281 gen_mfhc0(ctx
, cpu_gpr
[rt
], rd
, ctx
->opcode
& 0x7);
10287 TCGv t0
= tcg_temp_new();
10288 gen_load_gpr(t0
, rt
);
10289 gen_mthc0(ctx
, t0
, rd
, ctx
->opcode
& 0x7);
10295 check_cp0_enabled(ctx
);
10297 /* Treat as NOP. */
10300 gen_mftr(env
, ctx
, rt
, rd
, (ctx
->opcode
>> 5) & 1,
10301 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
10305 check_cp0_enabled(ctx
);
10306 gen_mttr(env
, ctx
, rd
, rt
, (ctx
->opcode
>> 5) & 1,
10307 ctx
->opcode
& 0x7, (ctx
->opcode
>> 4) & 1);
10312 if (!env
->tlb
->helper_tlbwi
) {
10315 gen_helper_tlbwi(cpu_env
);
10319 if (ctx
->ie
>= 2) {
10320 if (!env
->tlb
->helper_tlbinv
) {
10323 gen_helper_tlbinv(cpu_env
);
10324 } /* treat as nop if TLBINV not supported */
10328 if (ctx
->ie
>= 2) {
10329 if (!env
->tlb
->helper_tlbinvf
) {
10332 gen_helper_tlbinvf(cpu_env
);
10333 } /* treat as nop if TLBINV not supported */
10337 if (!env
->tlb
->helper_tlbwr
) {
10340 gen_helper_tlbwr(cpu_env
);
10344 if (!env
->tlb
->helper_tlbp
) {
10347 gen_helper_tlbp(cpu_env
);
10351 if (!env
->tlb
->helper_tlbr
) {
10354 gen_helper_tlbr(cpu_env
);
10356 case OPC_ERET
: /* OPC_ERETNC */
10357 if ((ctx
->insn_flags
& ISA_MIPS_R6
) &&
10358 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10361 int bit_shift
= (ctx
->hflags
& MIPS_HFLAG_M16
) ? 16 : 6;
10362 if (ctx
->opcode
& (1 << bit_shift
)) {
10365 check_insn(ctx
, ISA_MIPS_R5
);
10366 gen_helper_eretnc(cpu_env
);
10370 check_insn(ctx
, ISA_MIPS2
);
10371 gen_helper_eret(cpu_env
);
10373 ctx
->base
.is_jmp
= DISAS_EXIT
;
10378 check_insn(ctx
, ISA_MIPS_R1
);
10379 if ((ctx
->insn_flags
& ISA_MIPS_R6
) &&
10380 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10383 if (!(ctx
->hflags
& MIPS_HFLAG_DM
)) {
10385 gen_reserved_instruction(ctx
);
10387 gen_helper_deret(cpu_env
);
10388 ctx
->base
.is_jmp
= DISAS_EXIT
;
10393 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS_R1
);
10394 if ((ctx
->insn_flags
& ISA_MIPS_R6
) &&
10395 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10398 /* If we get an exception, we want to restart at next instruction */
10399 ctx
->base
.pc_next
+= 4;
10400 save_cpu_state(ctx
, 1);
10401 ctx
->base
.pc_next
-= 4;
10402 gen_helper_wait(cpu_env
);
10403 ctx
->base
.is_jmp
= DISAS_NORETURN
;
10408 gen_reserved_instruction(ctx
);
10411 (void)opn
; /* avoid a compiler warning */
10413 #endif /* !CONFIG_USER_ONLY */
10415 /* CP1 Branches (before delay slot) */
10416 static void gen_compute_branch1(DisasContext
*ctx
, uint32_t op
,
10417 int32_t cc
, int32_t offset
)
10419 target_ulong btarget
;
10420 TCGv_i32 t0
= tcg_temp_new_i32();
10422 if ((ctx
->insn_flags
& ISA_MIPS_R6
) && (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
10423 gen_reserved_instruction(ctx
);
10428 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R1
);
10431 btarget
= ctx
->base
.pc_next
+ 4 + offset
;
10435 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10436 tcg_gen_not_i32(t0
, t0
);
10437 tcg_gen_andi_i32(t0
, t0
, 1);
10438 tcg_gen_extu_i32_tl(bcond
, t0
);
10441 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10442 tcg_gen_not_i32(t0
, t0
);
10443 tcg_gen_andi_i32(t0
, t0
, 1);
10444 tcg_gen_extu_i32_tl(bcond
, t0
);
10447 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10448 tcg_gen_andi_i32(t0
, t0
, 1);
10449 tcg_gen_extu_i32_tl(bcond
, t0
);
10452 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10453 tcg_gen_andi_i32(t0
, t0
, 1);
10454 tcg_gen_extu_i32_tl(bcond
, t0
);
10456 ctx
->hflags
|= MIPS_HFLAG_BL
;
10460 TCGv_i32 t1
= tcg_temp_new_i32();
10461 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10462 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
10463 tcg_gen_nand_i32(t0
, t0
, t1
);
10464 tcg_temp_free_i32(t1
);
10465 tcg_gen_andi_i32(t0
, t0
, 1);
10466 tcg_gen_extu_i32_tl(bcond
, t0
);
10471 TCGv_i32 t1
= tcg_temp_new_i32();
10472 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10473 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
10474 tcg_gen_or_i32(t0
, t0
, t1
);
10475 tcg_temp_free_i32(t1
);
10476 tcg_gen_andi_i32(t0
, t0
, 1);
10477 tcg_gen_extu_i32_tl(bcond
, t0
);
10482 TCGv_i32 t1
= tcg_temp_new_i32();
10483 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10484 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
10485 tcg_gen_and_i32(t0
, t0
, t1
);
10486 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 2));
10487 tcg_gen_and_i32(t0
, t0
, t1
);
10488 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 3));
10489 tcg_gen_nand_i32(t0
, t0
, t1
);
10490 tcg_temp_free_i32(t1
);
10491 tcg_gen_andi_i32(t0
, t0
, 1);
10492 tcg_gen_extu_i32_tl(bcond
, t0
);
10497 TCGv_i32 t1
= tcg_temp_new_i32();
10498 tcg_gen_shri_i32(t0
, fpu_fcr31
, get_fp_bit(cc
));
10499 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 1));
10500 tcg_gen_or_i32(t0
, t0
, t1
);
10501 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 2));
10502 tcg_gen_or_i32(t0
, t0
, t1
);
10503 tcg_gen_shri_i32(t1
, fpu_fcr31
, get_fp_bit(cc
+ 3));
10504 tcg_gen_or_i32(t0
, t0
, t1
);
10505 tcg_temp_free_i32(t1
);
10506 tcg_gen_andi_i32(t0
, t0
, 1);
10507 tcg_gen_extu_i32_tl(bcond
, t0
);
10510 ctx
->hflags
|= MIPS_HFLAG_BC
;
10513 MIPS_INVAL("cp1 cond branch");
10514 gen_reserved_instruction(ctx
);
10517 ctx
->btarget
= btarget
;
10518 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10520 tcg_temp_free_i32(t0
);
10523 /* R6 CP1 Branches */
10524 static void gen_compute_branch1_r6(DisasContext
*ctx
, uint32_t op
,
10525 int32_t ft
, int32_t offset
,
10526 int delayslot_size
)
10528 target_ulong btarget
;
10529 TCGv_i64 t0
= tcg_temp_new_i64();
10531 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
10532 #ifdef MIPS_DEBUG_DISAS
10533 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
10534 "\n", ctx
->base
.pc_next
);
10536 gen_reserved_instruction(ctx
);
10540 gen_load_fpr64(ctx
, t0
, ft
);
10541 tcg_gen_andi_i64(t0
, t0
, 1);
10543 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
10547 tcg_gen_xori_i64(t0
, t0
, 1);
10548 ctx
->hflags
|= MIPS_HFLAG_BC
;
10551 /* t0 already set */
10552 ctx
->hflags
|= MIPS_HFLAG_BC
;
10555 MIPS_INVAL("cp1 cond branch");
10556 gen_reserved_instruction(ctx
);
10560 tcg_gen_trunc_i64_tl(bcond
, t0
);
10562 ctx
->btarget
= btarget
;
10564 switch (delayslot_size
) {
10566 ctx
->hflags
|= MIPS_HFLAG_BDS16
;
10569 ctx
->hflags
|= MIPS_HFLAG_BDS32
;
10574 tcg_temp_free_i64(t0
);
10577 /* Coprocessor 1 (FPU) */
10579 #define FOP(func, fmt) (((fmt) << 21) | (func))
10582 OPC_ADD_S
= FOP(0, FMT_S
),
10583 OPC_SUB_S
= FOP(1, FMT_S
),
10584 OPC_MUL_S
= FOP(2, FMT_S
),
10585 OPC_DIV_S
= FOP(3, FMT_S
),
10586 OPC_SQRT_S
= FOP(4, FMT_S
),
10587 OPC_ABS_S
= FOP(5, FMT_S
),
10588 OPC_MOV_S
= FOP(6, FMT_S
),
10589 OPC_NEG_S
= FOP(7, FMT_S
),
10590 OPC_ROUND_L_S
= FOP(8, FMT_S
),
10591 OPC_TRUNC_L_S
= FOP(9, FMT_S
),
10592 OPC_CEIL_L_S
= FOP(10, FMT_S
),
10593 OPC_FLOOR_L_S
= FOP(11, FMT_S
),
10594 OPC_ROUND_W_S
= FOP(12, FMT_S
),
10595 OPC_TRUNC_W_S
= FOP(13, FMT_S
),
10596 OPC_CEIL_W_S
= FOP(14, FMT_S
),
10597 OPC_FLOOR_W_S
= FOP(15, FMT_S
),
10598 OPC_SEL_S
= FOP(16, FMT_S
),
10599 OPC_MOVCF_S
= FOP(17, FMT_S
),
10600 OPC_MOVZ_S
= FOP(18, FMT_S
),
10601 OPC_MOVN_S
= FOP(19, FMT_S
),
10602 OPC_SELEQZ_S
= FOP(20, FMT_S
),
10603 OPC_RECIP_S
= FOP(21, FMT_S
),
10604 OPC_RSQRT_S
= FOP(22, FMT_S
),
10605 OPC_SELNEZ_S
= FOP(23, FMT_S
),
10606 OPC_MADDF_S
= FOP(24, FMT_S
),
10607 OPC_MSUBF_S
= FOP(25, FMT_S
),
10608 OPC_RINT_S
= FOP(26, FMT_S
),
10609 OPC_CLASS_S
= FOP(27, FMT_S
),
10610 OPC_MIN_S
= FOP(28, FMT_S
),
10611 OPC_RECIP2_S
= FOP(28, FMT_S
),
10612 OPC_MINA_S
= FOP(29, FMT_S
),
10613 OPC_RECIP1_S
= FOP(29, FMT_S
),
10614 OPC_MAX_S
= FOP(30, FMT_S
),
10615 OPC_RSQRT1_S
= FOP(30, FMT_S
),
10616 OPC_MAXA_S
= FOP(31, FMT_S
),
10617 OPC_RSQRT2_S
= FOP(31, FMT_S
),
10618 OPC_CVT_D_S
= FOP(33, FMT_S
),
10619 OPC_CVT_W_S
= FOP(36, FMT_S
),
10620 OPC_CVT_L_S
= FOP(37, FMT_S
),
10621 OPC_CVT_PS_S
= FOP(38, FMT_S
),
10622 OPC_CMP_F_S
= FOP(48, FMT_S
),
10623 OPC_CMP_UN_S
= FOP(49, FMT_S
),
10624 OPC_CMP_EQ_S
= FOP(50, FMT_S
),
10625 OPC_CMP_UEQ_S
= FOP(51, FMT_S
),
10626 OPC_CMP_OLT_S
= FOP(52, FMT_S
),
10627 OPC_CMP_ULT_S
= FOP(53, FMT_S
),
10628 OPC_CMP_OLE_S
= FOP(54, FMT_S
),
10629 OPC_CMP_ULE_S
= FOP(55, FMT_S
),
10630 OPC_CMP_SF_S
= FOP(56, FMT_S
),
10631 OPC_CMP_NGLE_S
= FOP(57, FMT_S
),
10632 OPC_CMP_SEQ_S
= FOP(58, FMT_S
),
10633 OPC_CMP_NGL_S
= FOP(59, FMT_S
),
10634 OPC_CMP_LT_S
= FOP(60, FMT_S
),
10635 OPC_CMP_NGE_S
= FOP(61, FMT_S
),
10636 OPC_CMP_LE_S
= FOP(62, FMT_S
),
10637 OPC_CMP_NGT_S
= FOP(63, FMT_S
),
10639 OPC_ADD_D
= FOP(0, FMT_D
),
10640 OPC_SUB_D
= FOP(1, FMT_D
),
10641 OPC_MUL_D
= FOP(2, FMT_D
),
10642 OPC_DIV_D
= FOP(3, FMT_D
),
10643 OPC_SQRT_D
= FOP(4, FMT_D
),
10644 OPC_ABS_D
= FOP(5, FMT_D
),
10645 OPC_MOV_D
= FOP(6, FMT_D
),
10646 OPC_NEG_D
= FOP(7, FMT_D
),
10647 OPC_ROUND_L_D
= FOP(8, FMT_D
),
10648 OPC_TRUNC_L_D
= FOP(9, FMT_D
),
10649 OPC_CEIL_L_D
= FOP(10, FMT_D
),
10650 OPC_FLOOR_L_D
= FOP(11, FMT_D
),
10651 OPC_ROUND_W_D
= FOP(12, FMT_D
),
10652 OPC_TRUNC_W_D
= FOP(13, FMT_D
),
10653 OPC_CEIL_W_D
= FOP(14, FMT_D
),
10654 OPC_FLOOR_W_D
= FOP(15, FMT_D
),
10655 OPC_SEL_D
= FOP(16, FMT_D
),
10656 OPC_MOVCF_D
= FOP(17, FMT_D
),
10657 OPC_MOVZ_D
= FOP(18, FMT_D
),
10658 OPC_MOVN_D
= FOP(19, FMT_D
),
10659 OPC_SELEQZ_D
= FOP(20, FMT_D
),
10660 OPC_RECIP_D
= FOP(21, FMT_D
),
10661 OPC_RSQRT_D
= FOP(22, FMT_D
),
10662 OPC_SELNEZ_D
= FOP(23, FMT_D
),
10663 OPC_MADDF_D
= FOP(24, FMT_D
),
10664 OPC_MSUBF_D
= FOP(25, FMT_D
),
10665 OPC_RINT_D
= FOP(26, FMT_D
),
10666 OPC_CLASS_D
= FOP(27, FMT_D
),
10667 OPC_MIN_D
= FOP(28, FMT_D
),
10668 OPC_RECIP2_D
= FOP(28, FMT_D
),
10669 OPC_MINA_D
= FOP(29, FMT_D
),
10670 OPC_RECIP1_D
= FOP(29, FMT_D
),
10671 OPC_MAX_D
= FOP(30, FMT_D
),
10672 OPC_RSQRT1_D
= FOP(30, FMT_D
),
10673 OPC_MAXA_D
= FOP(31, FMT_D
),
10674 OPC_RSQRT2_D
= FOP(31, FMT_D
),
10675 OPC_CVT_S_D
= FOP(32, FMT_D
),
10676 OPC_CVT_W_D
= FOP(36, FMT_D
),
10677 OPC_CVT_L_D
= FOP(37, FMT_D
),
10678 OPC_CMP_F_D
= FOP(48, FMT_D
),
10679 OPC_CMP_UN_D
= FOP(49, FMT_D
),
10680 OPC_CMP_EQ_D
= FOP(50, FMT_D
),
10681 OPC_CMP_UEQ_D
= FOP(51, FMT_D
),
10682 OPC_CMP_OLT_D
= FOP(52, FMT_D
),
10683 OPC_CMP_ULT_D
= FOP(53, FMT_D
),
10684 OPC_CMP_OLE_D
= FOP(54, FMT_D
),
10685 OPC_CMP_ULE_D
= FOP(55, FMT_D
),
10686 OPC_CMP_SF_D
= FOP(56, FMT_D
),
10687 OPC_CMP_NGLE_D
= FOP(57, FMT_D
),
10688 OPC_CMP_SEQ_D
= FOP(58, FMT_D
),
10689 OPC_CMP_NGL_D
= FOP(59, FMT_D
),
10690 OPC_CMP_LT_D
= FOP(60, FMT_D
),
10691 OPC_CMP_NGE_D
= FOP(61, FMT_D
),
10692 OPC_CMP_LE_D
= FOP(62, FMT_D
),
10693 OPC_CMP_NGT_D
= FOP(63, FMT_D
),
10695 OPC_CVT_S_W
= FOP(32, FMT_W
),
10696 OPC_CVT_D_W
= FOP(33, FMT_W
),
10697 OPC_CVT_S_L
= FOP(32, FMT_L
),
10698 OPC_CVT_D_L
= FOP(33, FMT_L
),
10699 OPC_CVT_PS_PW
= FOP(38, FMT_W
),
10701 OPC_ADD_PS
= FOP(0, FMT_PS
),
10702 OPC_SUB_PS
= FOP(1, FMT_PS
),
10703 OPC_MUL_PS
= FOP(2, FMT_PS
),
10704 OPC_DIV_PS
= FOP(3, FMT_PS
),
10705 OPC_ABS_PS
= FOP(5, FMT_PS
),
10706 OPC_MOV_PS
= FOP(6, FMT_PS
),
10707 OPC_NEG_PS
= FOP(7, FMT_PS
),
10708 OPC_MOVCF_PS
= FOP(17, FMT_PS
),
10709 OPC_MOVZ_PS
= FOP(18, FMT_PS
),
10710 OPC_MOVN_PS
= FOP(19, FMT_PS
),
10711 OPC_ADDR_PS
= FOP(24, FMT_PS
),
10712 OPC_MULR_PS
= FOP(26, FMT_PS
),
10713 OPC_RECIP2_PS
= FOP(28, FMT_PS
),
10714 OPC_RECIP1_PS
= FOP(29, FMT_PS
),
10715 OPC_RSQRT1_PS
= FOP(30, FMT_PS
),
10716 OPC_RSQRT2_PS
= FOP(31, FMT_PS
),
10718 OPC_CVT_S_PU
= FOP(32, FMT_PS
),
10719 OPC_CVT_PW_PS
= FOP(36, FMT_PS
),
10720 OPC_CVT_S_PL
= FOP(40, FMT_PS
),
10721 OPC_PLL_PS
= FOP(44, FMT_PS
),
10722 OPC_PLU_PS
= FOP(45, FMT_PS
),
10723 OPC_PUL_PS
= FOP(46, FMT_PS
),
10724 OPC_PUU_PS
= FOP(47, FMT_PS
),
10725 OPC_CMP_F_PS
= FOP(48, FMT_PS
),
10726 OPC_CMP_UN_PS
= FOP(49, FMT_PS
),
10727 OPC_CMP_EQ_PS
= FOP(50, FMT_PS
),
10728 OPC_CMP_UEQ_PS
= FOP(51, FMT_PS
),
10729 OPC_CMP_OLT_PS
= FOP(52, FMT_PS
),
10730 OPC_CMP_ULT_PS
= FOP(53, FMT_PS
),
10731 OPC_CMP_OLE_PS
= FOP(54, FMT_PS
),
10732 OPC_CMP_ULE_PS
= FOP(55, FMT_PS
),
10733 OPC_CMP_SF_PS
= FOP(56, FMT_PS
),
10734 OPC_CMP_NGLE_PS
= FOP(57, FMT_PS
),
10735 OPC_CMP_SEQ_PS
= FOP(58, FMT_PS
),
10736 OPC_CMP_NGL_PS
= FOP(59, FMT_PS
),
10737 OPC_CMP_LT_PS
= FOP(60, FMT_PS
),
10738 OPC_CMP_NGE_PS
= FOP(61, FMT_PS
),
10739 OPC_CMP_LE_PS
= FOP(62, FMT_PS
),
10740 OPC_CMP_NGT_PS
= FOP(63, FMT_PS
),
10744 R6_OPC_CMP_AF_S
= FOP(0, FMT_W
),
10745 R6_OPC_CMP_UN_S
= FOP(1, FMT_W
),
10746 R6_OPC_CMP_EQ_S
= FOP(2, FMT_W
),
10747 R6_OPC_CMP_UEQ_S
= FOP(3, FMT_W
),
10748 R6_OPC_CMP_LT_S
= FOP(4, FMT_W
),
10749 R6_OPC_CMP_ULT_S
= FOP(5, FMT_W
),
10750 R6_OPC_CMP_LE_S
= FOP(6, FMT_W
),
10751 R6_OPC_CMP_ULE_S
= FOP(7, FMT_W
),
10752 R6_OPC_CMP_SAF_S
= FOP(8, FMT_W
),
10753 R6_OPC_CMP_SUN_S
= FOP(9, FMT_W
),
10754 R6_OPC_CMP_SEQ_S
= FOP(10, FMT_W
),
10755 R6_OPC_CMP_SEUQ_S
= FOP(11, FMT_W
),
10756 R6_OPC_CMP_SLT_S
= FOP(12, FMT_W
),
10757 R6_OPC_CMP_SULT_S
= FOP(13, FMT_W
),
10758 R6_OPC_CMP_SLE_S
= FOP(14, FMT_W
),
10759 R6_OPC_CMP_SULE_S
= FOP(15, FMT_W
),
10760 R6_OPC_CMP_OR_S
= FOP(17, FMT_W
),
10761 R6_OPC_CMP_UNE_S
= FOP(18, FMT_W
),
10762 R6_OPC_CMP_NE_S
= FOP(19, FMT_W
),
10763 R6_OPC_CMP_SOR_S
= FOP(25, FMT_W
),
10764 R6_OPC_CMP_SUNE_S
= FOP(26, FMT_W
),
10765 R6_OPC_CMP_SNE_S
= FOP(27, FMT_W
),
10767 R6_OPC_CMP_AF_D
= FOP(0, FMT_L
),
10768 R6_OPC_CMP_UN_D
= FOP(1, FMT_L
),
10769 R6_OPC_CMP_EQ_D
= FOP(2, FMT_L
),
10770 R6_OPC_CMP_UEQ_D
= FOP(3, FMT_L
),
10771 R6_OPC_CMP_LT_D
= FOP(4, FMT_L
),
10772 R6_OPC_CMP_ULT_D
= FOP(5, FMT_L
),
10773 R6_OPC_CMP_LE_D
= FOP(6, FMT_L
),
10774 R6_OPC_CMP_ULE_D
= FOP(7, FMT_L
),
10775 R6_OPC_CMP_SAF_D
= FOP(8, FMT_L
),
10776 R6_OPC_CMP_SUN_D
= FOP(9, FMT_L
),
10777 R6_OPC_CMP_SEQ_D
= FOP(10, FMT_L
),
10778 R6_OPC_CMP_SEUQ_D
= FOP(11, FMT_L
),
10779 R6_OPC_CMP_SLT_D
= FOP(12, FMT_L
),
10780 R6_OPC_CMP_SULT_D
= FOP(13, FMT_L
),
10781 R6_OPC_CMP_SLE_D
= FOP(14, FMT_L
),
10782 R6_OPC_CMP_SULE_D
= FOP(15, FMT_L
),
10783 R6_OPC_CMP_OR_D
= FOP(17, FMT_L
),
10784 R6_OPC_CMP_UNE_D
= FOP(18, FMT_L
),
10785 R6_OPC_CMP_NE_D
= FOP(19, FMT_L
),
10786 R6_OPC_CMP_SOR_D
= FOP(25, FMT_L
),
10787 R6_OPC_CMP_SUNE_D
= FOP(26, FMT_L
),
10788 R6_OPC_CMP_SNE_D
= FOP(27, FMT_L
),
10791 static void gen_cp1(DisasContext
*ctx
, uint32_t opc
, int rt
, int fs
)
10793 TCGv t0
= tcg_temp_new();
10798 TCGv_i32 fp0
= tcg_temp_new_i32();
10800 gen_load_fpr32(ctx
, fp0
, fs
);
10801 tcg_gen_ext_i32_tl(t0
, fp0
);
10802 tcg_temp_free_i32(fp0
);
10804 gen_store_gpr(t0
, rt
);
10807 gen_load_gpr(t0
, rt
);
10809 TCGv_i32 fp0
= tcg_temp_new_i32();
10811 tcg_gen_trunc_tl_i32(fp0
, t0
);
10812 gen_store_fpr32(ctx
, fp0
, fs
);
10813 tcg_temp_free_i32(fp0
);
10817 gen_helper_1e0i(cfc1
, t0
, fs
);
10818 gen_store_gpr(t0
, rt
);
10821 gen_load_gpr(t0
, rt
);
10822 save_cpu_state(ctx
, 0);
10824 TCGv_i32 fs_tmp
= tcg_const_i32(fs
);
10826 gen_helper_0e2i(ctc1
, t0
, fs_tmp
, rt
);
10827 tcg_temp_free_i32(fs_tmp
);
10829 /* Stop translation as we may have changed hflags */
10830 ctx
->base
.is_jmp
= DISAS_STOP
;
10832 #if defined(TARGET_MIPS64)
10834 gen_load_fpr64(ctx
, t0
, fs
);
10835 gen_store_gpr(t0
, rt
);
10838 gen_load_gpr(t0
, rt
);
10839 gen_store_fpr64(ctx
, t0
, fs
);
10844 TCGv_i32 fp0
= tcg_temp_new_i32();
10846 gen_load_fpr32h(ctx
, fp0
, fs
);
10847 tcg_gen_ext_i32_tl(t0
, fp0
);
10848 tcg_temp_free_i32(fp0
);
10850 gen_store_gpr(t0
, rt
);
10853 gen_load_gpr(t0
, rt
);
10855 TCGv_i32 fp0
= tcg_temp_new_i32();
10857 tcg_gen_trunc_tl_i32(fp0
, t0
);
10858 gen_store_fpr32h(ctx
, fp0
, fs
);
10859 tcg_temp_free_i32(fp0
);
10863 MIPS_INVAL("cp1 move");
10864 gen_reserved_instruction(ctx
);
10872 static void gen_movci(DisasContext
*ctx
, int rd
, int rs
, int cc
, int tf
)
10879 /* Treat as NOP. */
10884 cond
= TCG_COND_EQ
;
10886 cond
= TCG_COND_NE
;
10889 l1
= gen_new_label();
10890 t0
= tcg_temp_new_i32();
10891 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10892 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10893 tcg_temp_free_i32(t0
);
10895 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
10897 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
10902 static inline void gen_movcf_s(DisasContext
*ctx
, int fs
, int fd
, int cc
,
10906 TCGv_i32 t0
= tcg_temp_new_i32();
10907 TCGLabel
*l1
= gen_new_label();
10910 cond
= TCG_COND_EQ
;
10912 cond
= TCG_COND_NE
;
10915 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10916 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10917 gen_load_fpr32(ctx
, t0
, fs
);
10918 gen_store_fpr32(ctx
, t0
, fd
);
10920 tcg_temp_free_i32(t0
);
10923 static inline void gen_movcf_d(DisasContext
*ctx
, int fs
, int fd
, int cc
,
10927 TCGv_i32 t0
= tcg_temp_new_i32();
10929 TCGLabel
*l1
= gen_new_label();
10932 cond
= TCG_COND_EQ
;
10934 cond
= TCG_COND_NE
;
10937 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10938 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10939 tcg_temp_free_i32(t0
);
10940 fp0
= tcg_temp_new_i64();
10941 gen_load_fpr64(ctx
, fp0
, fs
);
10942 gen_store_fpr64(ctx
, fp0
, fd
);
10943 tcg_temp_free_i64(fp0
);
10947 static inline void gen_movcf_ps(DisasContext
*ctx
, int fs
, int fd
,
10951 TCGv_i32 t0
= tcg_temp_new_i32();
10952 TCGLabel
*l1
= gen_new_label();
10953 TCGLabel
*l2
= gen_new_label();
10956 cond
= TCG_COND_EQ
;
10958 cond
= TCG_COND_NE
;
10961 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
));
10962 tcg_gen_brcondi_i32(cond
, t0
, 0, l1
);
10963 gen_load_fpr32(ctx
, t0
, fs
);
10964 gen_store_fpr32(ctx
, t0
, fd
);
10967 tcg_gen_andi_i32(t0
, fpu_fcr31
, 1 << get_fp_bit(cc
+ 1));
10968 tcg_gen_brcondi_i32(cond
, t0
, 0, l2
);
10969 gen_load_fpr32h(ctx
, t0
, fs
);
10970 gen_store_fpr32h(ctx
, t0
, fd
);
10971 tcg_temp_free_i32(t0
);
10975 static void gen_sel_s(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
10978 TCGv_i32 t1
= tcg_const_i32(0);
10979 TCGv_i32 fp0
= tcg_temp_new_i32();
10980 TCGv_i32 fp1
= tcg_temp_new_i32();
10981 TCGv_i32 fp2
= tcg_temp_new_i32();
10982 gen_load_fpr32(ctx
, fp0
, fd
);
10983 gen_load_fpr32(ctx
, fp1
, ft
);
10984 gen_load_fpr32(ctx
, fp2
, fs
);
10988 tcg_gen_andi_i32(fp0
, fp0
, 1);
10989 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
10992 tcg_gen_andi_i32(fp1
, fp1
, 1);
10993 tcg_gen_movcond_i32(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
10996 tcg_gen_andi_i32(fp1
, fp1
, 1);
10997 tcg_gen_movcond_i32(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
11000 MIPS_INVAL("gen_sel_s");
11001 gen_reserved_instruction(ctx
);
11005 gen_store_fpr32(ctx
, fp0
, fd
);
11006 tcg_temp_free_i32(fp2
);
11007 tcg_temp_free_i32(fp1
);
11008 tcg_temp_free_i32(fp0
);
11009 tcg_temp_free_i32(t1
);
11012 static void gen_sel_d(DisasContext
*ctx
, enum fopcode op1
, int fd
, int ft
,
11015 TCGv_i64 t1
= tcg_const_i64(0);
11016 TCGv_i64 fp0
= tcg_temp_new_i64();
11017 TCGv_i64 fp1
= tcg_temp_new_i64();
11018 TCGv_i64 fp2
= tcg_temp_new_i64();
11019 gen_load_fpr64(ctx
, fp0
, fd
);
11020 gen_load_fpr64(ctx
, fp1
, ft
);
11021 gen_load_fpr64(ctx
, fp2
, fs
);
11025 tcg_gen_andi_i64(fp0
, fp0
, 1);
11026 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp0
, t1
, fp1
, fp2
);
11029 tcg_gen_andi_i64(fp1
, fp1
, 1);
11030 tcg_gen_movcond_i64(TCG_COND_EQ
, fp0
, fp1
, t1
, fp2
, t1
);
11033 tcg_gen_andi_i64(fp1
, fp1
, 1);
11034 tcg_gen_movcond_i64(TCG_COND_NE
, fp0
, fp1
, t1
, fp2
, t1
);
11037 MIPS_INVAL("gen_sel_d");
11038 gen_reserved_instruction(ctx
);
11042 gen_store_fpr64(ctx
, fp0
, fd
);
11043 tcg_temp_free_i64(fp2
);
11044 tcg_temp_free_i64(fp1
);
11045 tcg_temp_free_i64(fp0
);
11046 tcg_temp_free_i64(t1
);
11049 static void gen_farith(DisasContext
*ctx
, enum fopcode op1
,
11050 int ft
, int fs
, int fd
, int cc
)
11052 uint32_t func
= ctx
->opcode
& 0x3f;
11056 TCGv_i32 fp0
= tcg_temp_new_i32();
11057 TCGv_i32 fp1
= tcg_temp_new_i32();
11059 gen_load_fpr32(ctx
, fp0
, fs
);
11060 gen_load_fpr32(ctx
, fp1
, ft
);
11061 gen_helper_float_add_s(fp0
, cpu_env
, fp0
, fp1
);
11062 tcg_temp_free_i32(fp1
);
11063 gen_store_fpr32(ctx
, fp0
, fd
);
11064 tcg_temp_free_i32(fp0
);
11069 TCGv_i32 fp0
= tcg_temp_new_i32();
11070 TCGv_i32 fp1
= tcg_temp_new_i32();
11072 gen_load_fpr32(ctx
, fp0
, fs
);
11073 gen_load_fpr32(ctx
, fp1
, ft
);
11074 gen_helper_float_sub_s(fp0
, cpu_env
, fp0
, fp1
);
11075 tcg_temp_free_i32(fp1
);
11076 gen_store_fpr32(ctx
, fp0
, fd
);
11077 tcg_temp_free_i32(fp0
);
11082 TCGv_i32 fp0
= tcg_temp_new_i32();
11083 TCGv_i32 fp1
= tcg_temp_new_i32();
11085 gen_load_fpr32(ctx
, fp0
, fs
);
11086 gen_load_fpr32(ctx
, fp1
, ft
);
11087 gen_helper_float_mul_s(fp0
, cpu_env
, fp0
, fp1
);
11088 tcg_temp_free_i32(fp1
);
11089 gen_store_fpr32(ctx
, fp0
, fd
);
11090 tcg_temp_free_i32(fp0
);
11095 TCGv_i32 fp0
= tcg_temp_new_i32();
11096 TCGv_i32 fp1
= tcg_temp_new_i32();
11098 gen_load_fpr32(ctx
, fp0
, fs
);
11099 gen_load_fpr32(ctx
, fp1
, ft
);
11100 gen_helper_float_div_s(fp0
, cpu_env
, fp0
, fp1
);
11101 tcg_temp_free_i32(fp1
);
11102 gen_store_fpr32(ctx
, fp0
, fd
);
11103 tcg_temp_free_i32(fp0
);
11108 TCGv_i32 fp0
= tcg_temp_new_i32();
11110 gen_load_fpr32(ctx
, fp0
, fs
);
11111 gen_helper_float_sqrt_s(fp0
, cpu_env
, fp0
);
11112 gen_store_fpr32(ctx
, fp0
, fd
);
11113 tcg_temp_free_i32(fp0
);
11118 TCGv_i32 fp0
= tcg_temp_new_i32();
11120 gen_load_fpr32(ctx
, fp0
, fs
);
11121 if (ctx
->abs2008
) {
11122 tcg_gen_andi_i32(fp0
, fp0
, 0x7fffffffUL
);
11124 gen_helper_float_abs_s(fp0
, fp0
);
11126 gen_store_fpr32(ctx
, fp0
, fd
);
11127 tcg_temp_free_i32(fp0
);
11132 TCGv_i32 fp0
= tcg_temp_new_i32();
11134 gen_load_fpr32(ctx
, fp0
, fs
);
11135 gen_store_fpr32(ctx
, fp0
, fd
);
11136 tcg_temp_free_i32(fp0
);
11141 TCGv_i32 fp0
= tcg_temp_new_i32();
11143 gen_load_fpr32(ctx
, fp0
, fs
);
11144 if (ctx
->abs2008
) {
11145 tcg_gen_xori_i32(fp0
, fp0
, 1UL << 31);
11147 gen_helper_float_chs_s(fp0
, fp0
);
11149 gen_store_fpr32(ctx
, fp0
, fd
);
11150 tcg_temp_free_i32(fp0
);
11153 case OPC_ROUND_L_S
:
11154 check_cp1_64bitmode(ctx
);
11156 TCGv_i32 fp32
= tcg_temp_new_i32();
11157 TCGv_i64 fp64
= tcg_temp_new_i64();
11159 gen_load_fpr32(ctx
, fp32
, fs
);
11160 if (ctx
->nan2008
) {
11161 gen_helper_float_round_2008_l_s(fp64
, cpu_env
, fp32
);
11163 gen_helper_float_round_l_s(fp64
, cpu_env
, fp32
);
11165 tcg_temp_free_i32(fp32
);
11166 gen_store_fpr64(ctx
, fp64
, fd
);
11167 tcg_temp_free_i64(fp64
);
11170 case OPC_TRUNC_L_S
:
11171 check_cp1_64bitmode(ctx
);
11173 TCGv_i32 fp32
= tcg_temp_new_i32();
11174 TCGv_i64 fp64
= tcg_temp_new_i64();
11176 gen_load_fpr32(ctx
, fp32
, fs
);
11177 if (ctx
->nan2008
) {
11178 gen_helper_float_trunc_2008_l_s(fp64
, cpu_env
, fp32
);
11180 gen_helper_float_trunc_l_s(fp64
, cpu_env
, fp32
);
11182 tcg_temp_free_i32(fp32
);
11183 gen_store_fpr64(ctx
, fp64
, fd
);
11184 tcg_temp_free_i64(fp64
);
11188 check_cp1_64bitmode(ctx
);
11190 TCGv_i32 fp32
= tcg_temp_new_i32();
11191 TCGv_i64 fp64
= tcg_temp_new_i64();
11193 gen_load_fpr32(ctx
, fp32
, fs
);
11194 if (ctx
->nan2008
) {
11195 gen_helper_float_ceil_2008_l_s(fp64
, cpu_env
, fp32
);
11197 gen_helper_float_ceil_l_s(fp64
, cpu_env
, fp32
);
11199 tcg_temp_free_i32(fp32
);
11200 gen_store_fpr64(ctx
, fp64
, fd
);
11201 tcg_temp_free_i64(fp64
);
11204 case OPC_FLOOR_L_S
:
11205 check_cp1_64bitmode(ctx
);
11207 TCGv_i32 fp32
= tcg_temp_new_i32();
11208 TCGv_i64 fp64
= tcg_temp_new_i64();
11210 gen_load_fpr32(ctx
, fp32
, fs
);
11211 if (ctx
->nan2008
) {
11212 gen_helper_float_floor_2008_l_s(fp64
, cpu_env
, fp32
);
11214 gen_helper_float_floor_l_s(fp64
, cpu_env
, fp32
);
11216 tcg_temp_free_i32(fp32
);
11217 gen_store_fpr64(ctx
, fp64
, fd
);
11218 tcg_temp_free_i64(fp64
);
11221 case OPC_ROUND_W_S
:
11223 TCGv_i32 fp0
= tcg_temp_new_i32();
11225 gen_load_fpr32(ctx
, fp0
, fs
);
11226 if (ctx
->nan2008
) {
11227 gen_helper_float_round_2008_w_s(fp0
, cpu_env
, fp0
);
11229 gen_helper_float_round_w_s(fp0
, cpu_env
, fp0
);
11231 gen_store_fpr32(ctx
, fp0
, fd
);
11232 tcg_temp_free_i32(fp0
);
11235 case OPC_TRUNC_W_S
:
11237 TCGv_i32 fp0
= tcg_temp_new_i32();
11239 gen_load_fpr32(ctx
, fp0
, fs
);
11240 if (ctx
->nan2008
) {
11241 gen_helper_float_trunc_2008_w_s(fp0
, cpu_env
, fp0
);
11243 gen_helper_float_trunc_w_s(fp0
, cpu_env
, fp0
);
11245 gen_store_fpr32(ctx
, fp0
, fd
);
11246 tcg_temp_free_i32(fp0
);
11251 TCGv_i32 fp0
= tcg_temp_new_i32();
11253 gen_load_fpr32(ctx
, fp0
, fs
);
11254 if (ctx
->nan2008
) {
11255 gen_helper_float_ceil_2008_w_s(fp0
, cpu_env
, fp0
);
11257 gen_helper_float_ceil_w_s(fp0
, cpu_env
, fp0
);
11259 gen_store_fpr32(ctx
, fp0
, fd
);
11260 tcg_temp_free_i32(fp0
);
11263 case OPC_FLOOR_W_S
:
11265 TCGv_i32 fp0
= tcg_temp_new_i32();
11267 gen_load_fpr32(ctx
, fp0
, fs
);
11268 if (ctx
->nan2008
) {
11269 gen_helper_float_floor_2008_w_s(fp0
, cpu_env
, fp0
);
11271 gen_helper_float_floor_w_s(fp0
, cpu_env
, fp0
);
11273 gen_store_fpr32(ctx
, fp0
, fd
);
11274 tcg_temp_free_i32(fp0
);
11278 check_insn(ctx
, ISA_MIPS_R6
);
11279 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11282 check_insn(ctx
, ISA_MIPS_R6
);
11283 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11286 check_insn(ctx
, ISA_MIPS_R6
);
11287 gen_sel_s(ctx
, op1
, fd
, ft
, fs
);
11290 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
11291 gen_movcf_s(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11294 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
11296 TCGLabel
*l1
= gen_new_label();
11300 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11302 fp0
= tcg_temp_new_i32();
11303 gen_load_fpr32(ctx
, fp0
, fs
);
11304 gen_store_fpr32(ctx
, fp0
, fd
);
11305 tcg_temp_free_i32(fp0
);
11310 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
11312 TCGLabel
*l1
= gen_new_label();
11316 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11317 fp0
= tcg_temp_new_i32();
11318 gen_load_fpr32(ctx
, fp0
, fs
);
11319 gen_store_fpr32(ctx
, fp0
, fd
);
11320 tcg_temp_free_i32(fp0
);
11327 TCGv_i32 fp0
= tcg_temp_new_i32();
11329 gen_load_fpr32(ctx
, fp0
, fs
);
11330 gen_helper_float_recip_s(fp0
, cpu_env
, fp0
);
11331 gen_store_fpr32(ctx
, fp0
, fd
);
11332 tcg_temp_free_i32(fp0
);
11337 TCGv_i32 fp0
= tcg_temp_new_i32();
11339 gen_load_fpr32(ctx
, fp0
, fs
);
11340 gen_helper_float_rsqrt_s(fp0
, cpu_env
, fp0
);
11341 gen_store_fpr32(ctx
, fp0
, fd
);
11342 tcg_temp_free_i32(fp0
);
11346 check_insn(ctx
, ISA_MIPS_R6
);
11348 TCGv_i32 fp0
= tcg_temp_new_i32();
11349 TCGv_i32 fp1
= tcg_temp_new_i32();
11350 TCGv_i32 fp2
= tcg_temp_new_i32();
11351 gen_load_fpr32(ctx
, fp0
, fs
);
11352 gen_load_fpr32(ctx
, fp1
, ft
);
11353 gen_load_fpr32(ctx
, fp2
, fd
);
11354 gen_helper_float_maddf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11355 gen_store_fpr32(ctx
, fp2
, fd
);
11356 tcg_temp_free_i32(fp2
);
11357 tcg_temp_free_i32(fp1
);
11358 tcg_temp_free_i32(fp0
);
11362 check_insn(ctx
, ISA_MIPS_R6
);
11364 TCGv_i32 fp0
= tcg_temp_new_i32();
11365 TCGv_i32 fp1
= tcg_temp_new_i32();
11366 TCGv_i32 fp2
= tcg_temp_new_i32();
11367 gen_load_fpr32(ctx
, fp0
, fs
);
11368 gen_load_fpr32(ctx
, fp1
, ft
);
11369 gen_load_fpr32(ctx
, fp2
, fd
);
11370 gen_helper_float_msubf_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11371 gen_store_fpr32(ctx
, fp2
, fd
);
11372 tcg_temp_free_i32(fp2
);
11373 tcg_temp_free_i32(fp1
);
11374 tcg_temp_free_i32(fp0
);
11378 check_insn(ctx
, ISA_MIPS_R6
);
11380 TCGv_i32 fp0
= tcg_temp_new_i32();
11381 gen_load_fpr32(ctx
, fp0
, fs
);
11382 gen_helper_float_rint_s(fp0
, cpu_env
, fp0
);
11383 gen_store_fpr32(ctx
, fp0
, fd
);
11384 tcg_temp_free_i32(fp0
);
11388 check_insn(ctx
, ISA_MIPS_R6
);
11390 TCGv_i32 fp0
= tcg_temp_new_i32();
11391 gen_load_fpr32(ctx
, fp0
, fs
);
11392 gen_helper_float_class_s(fp0
, cpu_env
, fp0
);
11393 gen_store_fpr32(ctx
, fp0
, fd
);
11394 tcg_temp_free_i32(fp0
);
11397 case OPC_MIN_S
: /* OPC_RECIP2_S */
11398 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
11400 TCGv_i32 fp0
= tcg_temp_new_i32();
11401 TCGv_i32 fp1
= tcg_temp_new_i32();
11402 TCGv_i32 fp2
= tcg_temp_new_i32();
11403 gen_load_fpr32(ctx
, fp0
, fs
);
11404 gen_load_fpr32(ctx
, fp1
, ft
);
11405 gen_helper_float_min_s(fp2
, cpu_env
, fp0
, fp1
);
11406 gen_store_fpr32(ctx
, fp2
, fd
);
11407 tcg_temp_free_i32(fp2
);
11408 tcg_temp_free_i32(fp1
);
11409 tcg_temp_free_i32(fp0
);
11412 check_cp1_64bitmode(ctx
);
11414 TCGv_i32 fp0
= tcg_temp_new_i32();
11415 TCGv_i32 fp1
= tcg_temp_new_i32();
11417 gen_load_fpr32(ctx
, fp0
, fs
);
11418 gen_load_fpr32(ctx
, fp1
, ft
);
11419 gen_helper_float_recip2_s(fp0
, cpu_env
, fp0
, fp1
);
11420 tcg_temp_free_i32(fp1
);
11421 gen_store_fpr32(ctx
, fp0
, fd
);
11422 tcg_temp_free_i32(fp0
);
11426 case OPC_MINA_S
: /* OPC_RECIP1_S */
11427 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
11429 TCGv_i32 fp0
= tcg_temp_new_i32();
11430 TCGv_i32 fp1
= tcg_temp_new_i32();
11431 TCGv_i32 fp2
= tcg_temp_new_i32();
11432 gen_load_fpr32(ctx
, fp0
, fs
);
11433 gen_load_fpr32(ctx
, fp1
, ft
);
11434 gen_helper_float_mina_s(fp2
, cpu_env
, fp0
, fp1
);
11435 gen_store_fpr32(ctx
, fp2
, fd
);
11436 tcg_temp_free_i32(fp2
);
11437 tcg_temp_free_i32(fp1
);
11438 tcg_temp_free_i32(fp0
);
11441 check_cp1_64bitmode(ctx
);
11443 TCGv_i32 fp0
= tcg_temp_new_i32();
11445 gen_load_fpr32(ctx
, fp0
, fs
);
11446 gen_helper_float_recip1_s(fp0
, cpu_env
, fp0
);
11447 gen_store_fpr32(ctx
, fp0
, fd
);
11448 tcg_temp_free_i32(fp0
);
11452 case OPC_MAX_S
: /* OPC_RSQRT1_S */
11453 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
11455 TCGv_i32 fp0
= tcg_temp_new_i32();
11456 TCGv_i32 fp1
= tcg_temp_new_i32();
11457 gen_load_fpr32(ctx
, fp0
, fs
);
11458 gen_load_fpr32(ctx
, fp1
, ft
);
11459 gen_helper_float_max_s(fp1
, cpu_env
, fp0
, fp1
);
11460 gen_store_fpr32(ctx
, fp1
, fd
);
11461 tcg_temp_free_i32(fp1
);
11462 tcg_temp_free_i32(fp0
);
11465 check_cp1_64bitmode(ctx
);
11467 TCGv_i32 fp0
= tcg_temp_new_i32();
11469 gen_load_fpr32(ctx
, fp0
, fs
);
11470 gen_helper_float_rsqrt1_s(fp0
, cpu_env
, fp0
);
11471 gen_store_fpr32(ctx
, fp0
, fd
);
11472 tcg_temp_free_i32(fp0
);
11476 case OPC_MAXA_S
: /* OPC_RSQRT2_S */
11477 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
11479 TCGv_i32 fp0
= tcg_temp_new_i32();
11480 TCGv_i32 fp1
= tcg_temp_new_i32();
11481 gen_load_fpr32(ctx
, fp0
, fs
);
11482 gen_load_fpr32(ctx
, fp1
, ft
);
11483 gen_helper_float_maxa_s(fp1
, cpu_env
, fp0
, fp1
);
11484 gen_store_fpr32(ctx
, fp1
, fd
);
11485 tcg_temp_free_i32(fp1
);
11486 tcg_temp_free_i32(fp0
);
11489 check_cp1_64bitmode(ctx
);
11491 TCGv_i32 fp0
= tcg_temp_new_i32();
11492 TCGv_i32 fp1
= tcg_temp_new_i32();
11494 gen_load_fpr32(ctx
, fp0
, fs
);
11495 gen_load_fpr32(ctx
, fp1
, ft
);
11496 gen_helper_float_rsqrt2_s(fp0
, cpu_env
, fp0
, fp1
);
11497 tcg_temp_free_i32(fp1
);
11498 gen_store_fpr32(ctx
, fp0
, fd
);
11499 tcg_temp_free_i32(fp0
);
11504 check_cp1_registers(ctx
, fd
);
11506 TCGv_i32 fp32
= tcg_temp_new_i32();
11507 TCGv_i64 fp64
= tcg_temp_new_i64();
11509 gen_load_fpr32(ctx
, fp32
, fs
);
11510 gen_helper_float_cvtd_s(fp64
, cpu_env
, fp32
);
11511 tcg_temp_free_i32(fp32
);
11512 gen_store_fpr64(ctx
, fp64
, fd
);
11513 tcg_temp_free_i64(fp64
);
11518 TCGv_i32 fp0
= tcg_temp_new_i32();
11520 gen_load_fpr32(ctx
, fp0
, fs
);
11521 if (ctx
->nan2008
) {
11522 gen_helper_float_cvt_2008_w_s(fp0
, cpu_env
, fp0
);
11524 gen_helper_float_cvt_w_s(fp0
, cpu_env
, fp0
);
11526 gen_store_fpr32(ctx
, fp0
, fd
);
11527 tcg_temp_free_i32(fp0
);
11531 check_cp1_64bitmode(ctx
);
11533 TCGv_i32 fp32
= tcg_temp_new_i32();
11534 TCGv_i64 fp64
= tcg_temp_new_i64();
11536 gen_load_fpr32(ctx
, fp32
, fs
);
11537 if (ctx
->nan2008
) {
11538 gen_helper_float_cvt_2008_l_s(fp64
, cpu_env
, fp32
);
11540 gen_helper_float_cvt_l_s(fp64
, cpu_env
, fp32
);
11542 tcg_temp_free_i32(fp32
);
11543 gen_store_fpr64(ctx
, fp64
, fd
);
11544 tcg_temp_free_i64(fp64
);
11550 TCGv_i64 fp64
= tcg_temp_new_i64();
11551 TCGv_i32 fp32_0
= tcg_temp_new_i32();
11552 TCGv_i32 fp32_1
= tcg_temp_new_i32();
11554 gen_load_fpr32(ctx
, fp32_0
, fs
);
11555 gen_load_fpr32(ctx
, fp32_1
, ft
);
11556 tcg_gen_concat_i32_i64(fp64
, fp32_1
, fp32_0
);
11557 tcg_temp_free_i32(fp32_1
);
11558 tcg_temp_free_i32(fp32_0
);
11559 gen_store_fpr64(ctx
, fp64
, fd
);
11560 tcg_temp_free_i64(fp64
);
11566 case OPC_CMP_UEQ_S
:
11567 case OPC_CMP_OLT_S
:
11568 case OPC_CMP_ULT_S
:
11569 case OPC_CMP_OLE_S
:
11570 case OPC_CMP_ULE_S
:
11572 case OPC_CMP_NGLE_S
:
11573 case OPC_CMP_SEQ_S
:
11574 case OPC_CMP_NGL_S
:
11576 case OPC_CMP_NGE_S
:
11578 case OPC_CMP_NGT_S
:
11579 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
11580 if (ctx
->opcode
& (1 << 6)) {
11581 gen_cmpabs_s(ctx
, func
- 48, ft
, fs
, cc
);
11583 gen_cmp_s(ctx
, func
- 48, ft
, fs
, cc
);
11587 check_cp1_registers(ctx
, fs
| ft
| fd
);
11589 TCGv_i64 fp0
= tcg_temp_new_i64();
11590 TCGv_i64 fp1
= tcg_temp_new_i64();
11592 gen_load_fpr64(ctx
, fp0
, fs
);
11593 gen_load_fpr64(ctx
, fp1
, ft
);
11594 gen_helper_float_add_d(fp0
, cpu_env
, fp0
, fp1
);
11595 tcg_temp_free_i64(fp1
);
11596 gen_store_fpr64(ctx
, fp0
, fd
);
11597 tcg_temp_free_i64(fp0
);
11601 check_cp1_registers(ctx
, fs
| ft
| fd
);
11603 TCGv_i64 fp0
= tcg_temp_new_i64();
11604 TCGv_i64 fp1
= tcg_temp_new_i64();
11606 gen_load_fpr64(ctx
, fp0
, fs
);
11607 gen_load_fpr64(ctx
, fp1
, ft
);
11608 gen_helper_float_sub_d(fp0
, cpu_env
, fp0
, fp1
);
11609 tcg_temp_free_i64(fp1
);
11610 gen_store_fpr64(ctx
, fp0
, fd
);
11611 tcg_temp_free_i64(fp0
);
11615 check_cp1_registers(ctx
, fs
| ft
| fd
);
11617 TCGv_i64 fp0
= tcg_temp_new_i64();
11618 TCGv_i64 fp1
= tcg_temp_new_i64();
11620 gen_load_fpr64(ctx
, fp0
, fs
);
11621 gen_load_fpr64(ctx
, fp1
, ft
);
11622 gen_helper_float_mul_d(fp0
, cpu_env
, fp0
, fp1
);
11623 tcg_temp_free_i64(fp1
);
11624 gen_store_fpr64(ctx
, fp0
, fd
);
11625 tcg_temp_free_i64(fp0
);
11629 check_cp1_registers(ctx
, fs
| ft
| fd
);
11631 TCGv_i64 fp0
= tcg_temp_new_i64();
11632 TCGv_i64 fp1
= tcg_temp_new_i64();
11634 gen_load_fpr64(ctx
, fp0
, fs
);
11635 gen_load_fpr64(ctx
, fp1
, ft
);
11636 gen_helper_float_div_d(fp0
, cpu_env
, fp0
, fp1
);
11637 tcg_temp_free_i64(fp1
);
11638 gen_store_fpr64(ctx
, fp0
, fd
);
11639 tcg_temp_free_i64(fp0
);
11643 check_cp1_registers(ctx
, fs
| fd
);
11645 TCGv_i64 fp0
= tcg_temp_new_i64();
11647 gen_load_fpr64(ctx
, fp0
, fs
);
11648 gen_helper_float_sqrt_d(fp0
, cpu_env
, fp0
);
11649 gen_store_fpr64(ctx
, fp0
, fd
);
11650 tcg_temp_free_i64(fp0
);
11654 check_cp1_registers(ctx
, fs
| fd
);
11656 TCGv_i64 fp0
= tcg_temp_new_i64();
11658 gen_load_fpr64(ctx
, fp0
, fs
);
11659 if (ctx
->abs2008
) {
11660 tcg_gen_andi_i64(fp0
, fp0
, 0x7fffffffffffffffULL
);
11662 gen_helper_float_abs_d(fp0
, fp0
);
11664 gen_store_fpr64(ctx
, fp0
, fd
);
11665 tcg_temp_free_i64(fp0
);
11669 check_cp1_registers(ctx
, fs
| fd
);
11671 TCGv_i64 fp0
= tcg_temp_new_i64();
11673 gen_load_fpr64(ctx
, fp0
, fs
);
11674 gen_store_fpr64(ctx
, fp0
, fd
);
11675 tcg_temp_free_i64(fp0
);
11679 check_cp1_registers(ctx
, fs
| fd
);
11681 TCGv_i64 fp0
= tcg_temp_new_i64();
11683 gen_load_fpr64(ctx
, fp0
, fs
);
11684 if (ctx
->abs2008
) {
11685 tcg_gen_xori_i64(fp0
, fp0
, 1ULL << 63);
11687 gen_helper_float_chs_d(fp0
, fp0
);
11689 gen_store_fpr64(ctx
, fp0
, fd
);
11690 tcg_temp_free_i64(fp0
);
11693 case OPC_ROUND_L_D
:
11694 check_cp1_64bitmode(ctx
);
11696 TCGv_i64 fp0
= tcg_temp_new_i64();
11698 gen_load_fpr64(ctx
, fp0
, fs
);
11699 if (ctx
->nan2008
) {
11700 gen_helper_float_round_2008_l_d(fp0
, cpu_env
, fp0
);
11702 gen_helper_float_round_l_d(fp0
, cpu_env
, fp0
);
11704 gen_store_fpr64(ctx
, fp0
, fd
);
11705 tcg_temp_free_i64(fp0
);
11708 case OPC_TRUNC_L_D
:
11709 check_cp1_64bitmode(ctx
);
11711 TCGv_i64 fp0
= tcg_temp_new_i64();
11713 gen_load_fpr64(ctx
, fp0
, fs
);
11714 if (ctx
->nan2008
) {
11715 gen_helper_float_trunc_2008_l_d(fp0
, cpu_env
, fp0
);
11717 gen_helper_float_trunc_l_d(fp0
, cpu_env
, fp0
);
11719 gen_store_fpr64(ctx
, fp0
, fd
);
11720 tcg_temp_free_i64(fp0
);
11724 check_cp1_64bitmode(ctx
);
11726 TCGv_i64 fp0
= tcg_temp_new_i64();
11728 gen_load_fpr64(ctx
, fp0
, fs
);
11729 if (ctx
->nan2008
) {
11730 gen_helper_float_ceil_2008_l_d(fp0
, cpu_env
, fp0
);
11732 gen_helper_float_ceil_l_d(fp0
, cpu_env
, fp0
);
11734 gen_store_fpr64(ctx
, fp0
, fd
);
11735 tcg_temp_free_i64(fp0
);
11738 case OPC_FLOOR_L_D
:
11739 check_cp1_64bitmode(ctx
);
11741 TCGv_i64 fp0
= tcg_temp_new_i64();
11743 gen_load_fpr64(ctx
, fp0
, fs
);
11744 if (ctx
->nan2008
) {
11745 gen_helper_float_floor_2008_l_d(fp0
, cpu_env
, fp0
);
11747 gen_helper_float_floor_l_d(fp0
, cpu_env
, fp0
);
11749 gen_store_fpr64(ctx
, fp0
, fd
);
11750 tcg_temp_free_i64(fp0
);
11753 case OPC_ROUND_W_D
:
11754 check_cp1_registers(ctx
, fs
);
11756 TCGv_i32 fp32
= tcg_temp_new_i32();
11757 TCGv_i64 fp64
= tcg_temp_new_i64();
11759 gen_load_fpr64(ctx
, fp64
, fs
);
11760 if (ctx
->nan2008
) {
11761 gen_helper_float_round_2008_w_d(fp32
, cpu_env
, fp64
);
11763 gen_helper_float_round_w_d(fp32
, cpu_env
, fp64
);
11765 tcg_temp_free_i64(fp64
);
11766 gen_store_fpr32(ctx
, fp32
, fd
);
11767 tcg_temp_free_i32(fp32
);
11770 case OPC_TRUNC_W_D
:
11771 check_cp1_registers(ctx
, fs
);
11773 TCGv_i32 fp32
= tcg_temp_new_i32();
11774 TCGv_i64 fp64
= tcg_temp_new_i64();
11776 gen_load_fpr64(ctx
, fp64
, fs
);
11777 if (ctx
->nan2008
) {
11778 gen_helper_float_trunc_2008_w_d(fp32
, cpu_env
, fp64
);
11780 gen_helper_float_trunc_w_d(fp32
, cpu_env
, fp64
);
11782 tcg_temp_free_i64(fp64
);
11783 gen_store_fpr32(ctx
, fp32
, fd
);
11784 tcg_temp_free_i32(fp32
);
11788 check_cp1_registers(ctx
, fs
);
11790 TCGv_i32 fp32
= tcg_temp_new_i32();
11791 TCGv_i64 fp64
= tcg_temp_new_i64();
11793 gen_load_fpr64(ctx
, fp64
, fs
);
11794 if (ctx
->nan2008
) {
11795 gen_helper_float_ceil_2008_w_d(fp32
, cpu_env
, fp64
);
11797 gen_helper_float_ceil_w_d(fp32
, cpu_env
, fp64
);
11799 tcg_temp_free_i64(fp64
);
11800 gen_store_fpr32(ctx
, fp32
, fd
);
11801 tcg_temp_free_i32(fp32
);
11804 case OPC_FLOOR_W_D
:
11805 check_cp1_registers(ctx
, fs
);
11807 TCGv_i32 fp32
= tcg_temp_new_i32();
11808 TCGv_i64 fp64
= tcg_temp_new_i64();
11810 gen_load_fpr64(ctx
, fp64
, fs
);
11811 if (ctx
->nan2008
) {
11812 gen_helper_float_floor_2008_w_d(fp32
, cpu_env
, fp64
);
11814 gen_helper_float_floor_w_d(fp32
, cpu_env
, fp64
);
11816 tcg_temp_free_i64(fp64
);
11817 gen_store_fpr32(ctx
, fp32
, fd
);
11818 tcg_temp_free_i32(fp32
);
11822 check_insn(ctx
, ISA_MIPS_R6
);
11823 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11826 check_insn(ctx
, ISA_MIPS_R6
);
11827 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11830 check_insn(ctx
, ISA_MIPS_R6
);
11831 gen_sel_d(ctx
, op1
, fd
, ft
, fs
);
11834 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
11835 gen_movcf_d(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
11838 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
11840 TCGLabel
*l1
= gen_new_label();
11844 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
11846 fp0
= tcg_temp_new_i64();
11847 gen_load_fpr64(ctx
, fp0
, fs
);
11848 gen_store_fpr64(ctx
, fp0
, fd
);
11849 tcg_temp_free_i64(fp0
);
11854 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
11856 TCGLabel
*l1
= gen_new_label();
11860 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
11861 fp0
= tcg_temp_new_i64();
11862 gen_load_fpr64(ctx
, fp0
, fs
);
11863 gen_store_fpr64(ctx
, fp0
, fd
);
11864 tcg_temp_free_i64(fp0
);
11870 check_cp1_registers(ctx
, fs
| fd
);
11872 TCGv_i64 fp0
= tcg_temp_new_i64();
11874 gen_load_fpr64(ctx
, fp0
, fs
);
11875 gen_helper_float_recip_d(fp0
, cpu_env
, fp0
);
11876 gen_store_fpr64(ctx
, fp0
, fd
);
11877 tcg_temp_free_i64(fp0
);
11881 check_cp1_registers(ctx
, fs
| fd
);
11883 TCGv_i64 fp0
= tcg_temp_new_i64();
11885 gen_load_fpr64(ctx
, fp0
, fs
);
11886 gen_helper_float_rsqrt_d(fp0
, cpu_env
, fp0
);
11887 gen_store_fpr64(ctx
, fp0
, fd
);
11888 tcg_temp_free_i64(fp0
);
11892 check_insn(ctx
, ISA_MIPS_R6
);
11894 TCGv_i64 fp0
= tcg_temp_new_i64();
11895 TCGv_i64 fp1
= tcg_temp_new_i64();
11896 TCGv_i64 fp2
= tcg_temp_new_i64();
11897 gen_load_fpr64(ctx
, fp0
, fs
);
11898 gen_load_fpr64(ctx
, fp1
, ft
);
11899 gen_load_fpr64(ctx
, fp2
, fd
);
11900 gen_helper_float_maddf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11901 gen_store_fpr64(ctx
, fp2
, fd
);
11902 tcg_temp_free_i64(fp2
);
11903 tcg_temp_free_i64(fp1
);
11904 tcg_temp_free_i64(fp0
);
11908 check_insn(ctx
, ISA_MIPS_R6
);
11910 TCGv_i64 fp0
= tcg_temp_new_i64();
11911 TCGv_i64 fp1
= tcg_temp_new_i64();
11912 TCGv_i64 fp2
= tcg_temp_new_i64();
11913 gen_load_fpr64(ctx
, fp0
, fs
);
11914 gen_load_fpr64(ctx
, fp1
, ft
);
11915 gen_load_fpr64(ctx
, fp2
, fd
);
11916 gen_helper_float_msubf_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
11917 gen_store_fpr64(ctx
, fp2
, fd
);
11918 tcg_temp_free_i64(fp2
);
11919 tcg_temp_free_i64(fp1
);
11920 tcg_temp_free_i64(fp0
);
11924 check_insn(ctx
, ISA_MIPS_R6
);
11926 TCGv_i64 fp0
= tcg_temp_new_i64();
11927 gen_load_fpr64(ctx
, fp0
, fs
);
11928 gen_helper_float_rint_d(fp0
, cpu_env
, fp0
);
11929 gen_store_fpr64(ctx
, fp0
, fd
);
11930 tcg_temp_free_i64(fp0
);
11934 check_insn(ctx
, ISA_MIPS_R6
);
11936 TCGv_i64 fp0
= tcg_temp_new_i64();
11937 gen_load_fpr64(ctx
, fp0
, fs
);
11938 gen_helper_float_class_d(fp0
, cpu_env
, fp0
);
11939 gen_store_fpr64(ctx
, fp0
, fd
);
11940 tcg_temp_free_i64(fp0
);
11943 case OPC_MIN_D
: /* OPC_RECIP2_D */
11944 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
11946 TCGv_i64 fp0
= tcg_temp_new_i64();
11947 TCGv_i64 fp1
= tcg_temp_new_i64();
11948 gen_load_fpr64(ctx
, fp0
, fs
);
11949 gen_load_fpr64(ctx
, fp1
, ft
);
11950 gen_helper_float_min_d(fp1
, cpu_env
, fp0
, fp1
);
11951 gen_store_fpr64(ctx
, fp1
, fd
);
11952 tcg_temp_free_i64(fp1
);
11953 tcg_temp_free_i64(fp0
);
11956 check_cp1_64bitmode(ctx
);
11958 TCGv_i64 fp0
= tcg_temp_new_i64();
11959 TCGv_i64 fp1
= tcg_temp_new_i64();
11961 gen_load_fpr64(ctx
, fp0
, fs
);
11962 gen_load_fpr64(ctx
, fp1
, ft
);
11963 gen_helper_float_recip2_d(fp0
, cpu_env
, fp0
, fp1
);
11964 tcg_temp_free_i64(fp1
);
11965 gen_store_fpr64(ctx
, fp0
, fd
);
11966 tcg_temp_free_i64(fp0
);
11970 case OPC_MINA_D
: /* OPC_RECIP1_D */
11971 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
11973 TCGv_i64 fp0
= tcg_temp_new_i64();
11974 TCGv_i64 fp1
= tcg_temp_new_i64();
11975 gen_load_fpr64(ctx
, fp0
, fs
);
11976 gen_load_fpr64(ctx
, fp1
, ft
);
11977 gen_helper_float_mina_d(fp1
, cpu_env
, fp0
, fp1
);
11978 gen_store_fpr64(ctx
, fp1
, fd
);
11979 tcg_temp_free_i64(fp1
);
11980 tcg_temp_free_i64(fp0
);
11983 check_cp1_64bitmode(ctx
);
11985 TCGv_i64 fp0
= tcg_temp_new_i64();
11987 gen_load_fpr64(ctx
, fp0
, fs
);
11988 gen_helper_float_recip1_d(fp0
, cpu_env
, fp0
);
11989 gen_store_fpr64(ctx
, fp0
, fd
);
11990 tcg_temp_free_i64(fp0
);
11994 case OPC_MAX_D
: /* OPC_RSQRT1_D */
11995 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
11997 TCGv_i64 fp0
= tcg_temp_new_i64();
11998 TCGv_i64 fp1
= tcg_temp_new_i64();
11999 gen_load_fpr64(ctx
, fp0
, fs
);
12000 gen_load_fpr64(ctx
, fp1
, ft
);
12001 gen_helper_float_max_d(fp1
, cpu_env
, fp0
, fp1
);
12002 gen_store_fpr64(ctx
, fp1
, fd
);
12003 tcg_temp_free_i64(fp1
);
12004 tcg_temp_free_i64(fp0
);
12007 check_cp1_64bitmode(ctx
);
12009 TCGv_i64 fp0
= tcg_temp_new_i64();
12011 gen_load_fpr64(ctx
, fp0
, fs
);
12012 gen_helper_float_rsqrt1_d(fp0
, cpu_env
, fp0
);
12013 gen_store_fpr64(ctx
, fp0
, fd
);
12014 tcg_temp_free_i64(fp0
);
12018 case OPC_MAXA_D
: /* OPC_RSQRT2_D */
12019 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
12021 TCGv_i64 fp0
= tcg_temp_new_i64();
12022 TCGv_i64 fp1
= tcg_temp_new_i64();
12023 gen_load_fpr64(ctx
, fp0
, fs
);
12024 gen_load_fpr64(ctx
, fp1
, ft
);
12025 gen_helper_float_maxa_d(fp1
, cpu_env
, fp0
, fp1
);
12026 gen_store_fpr64(ctx
, fp1
, fd
);
12027 tcg_temp_free_i64(fp1
);
12028 tcg_temp_free_i64(fp0
);
12031 check_cp1_64bitmode(ctx
);
12033 TCGv_i64 fp0
= tcg_temp_new_i64();
12034 TCGv_i64 fp1
= tcg_temp_new_i64();
12036 gen_load_fpr64(ctx
, fp0
, fs
);
12037 gen_load_fpr64(ctx
, fp1
, ft
);
12038 gen_helper_float_rsqrt2_d(fp0
, cpu_env
, fp0
, fp1
);
12039 tcg_temp_free_i64(fp1
);
12040 gen_store_fpr64(ctx
, fp0
, fd
);
12041 tcg_temp_free_i64(fp0
);
12048 case OPC_CMP_UEQ_D
:
12049 case OPC_CMP_OLT_D
:
12050 case OPC_CMP_ULT_D
:
12051 case OPC_CMP_OLE_D
:
12052 case OPC_CMP_ULE_D
:
12054 case OPC_CMP_NGLE_D
:
12055 case OPC_CMP_SEQ_D
:
12056 case OPC_CMP_NGL_D
:
12058 case OPC_CMP_NGE_D
:
12060 case OPC_CMP_NGT_D
:
12061 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
12062 if (ctx
->opcode
& (1 << 6)) {
12063 gen_cmpabs_d(ctx
, func
- 48, ft
, fs
, cc
);
12065 gen_cmp_d(ctx
, func
- 48, ft
, fs
, cc
);
12069 check_cp1_registers(ctx
, fs
);
12071 TCGv_i32 fp32
= tcg_temp_new_i32();
12072 TCGv_i64 fp64
= tcg_temp_new_i64();
12074 gen_load_fpr64(ctx
, fp64
, fs
);
12075 gen_helper_float_cvts_d(fp32
, cpu_env
, fp64
);
12076 tcg_temp_free_i64(fp64
);
12077 gen_store_fpr32(ctx
, fp32
, fd
);
12078 tcg_temp_free_i32(fp32
);
12082 check_cp1_registers(ctx
, fs
);
12084 TCGv_i32 fp32
= tcg_temp_new_i32();
12085 TCGv_i64 fp64
= tcg_temp_new_i64();
12087 gen_load_fpr64(ctx
, fp64
, fs
);
12088 if (ctx
->nan2008
) {
12089 gen_helper_float_cvt_2008_w_d(fp32
, cpu_env
, fp64
);
12091 gen_helper_float_cvt_w_d(fp32
, cpu_env
, fp64
);
12093 tcg_temp_free_i64(fp64
);
12094 gen_store_fpr32(ctx
, fp32
, fd
);
12095 tcg_temp_free_i32(fp32
);
12099 check_cp1_64bitmode(ctx
);
12101 TCGv_i64 fp0
= tcg_temp_new_i64();
12103 gen_load_fpr64(ctx
, fp0
, fs
);
12104 if (ctx
->nan2008
) {
12105 gen_helper_float_cvt_2008_l_d(fp0
, cpu_env
, fp0
);
12107 gen_helper_float_cvt_l_d(fp0
, cpu_env
, fp0
);
12109 gen_store_fpr64(ctx
, fp0
, fd
);
12110 tcg_temp_free_i64(fp0
);
12115 TCGv_i32 fp0
= tcg_temp_new_i32();
12117 gen_load_fpr32(ctx
, fp0
, fs
);
12118 gen_helper_float_cvts_w(fp0
, cpu_env
, fp0
);
12119 gen_store_fpr32(ctx
, fp0
, fd
);
12120 tcg_temp_free_i32(fp0
);
12124 check_cp1_registers(ctx
, fd
);
12126 TCGv_i32 fp32
= tcg_temp_new_i32();
12127 TCGv_i64 fp64
= tcg_temp_new_i64();
12129 gen_load_fpr32(ctx
, fp32
, fs
);
12130 gen_helper_float_cvtd_w(fp64
, cpu_env
, fp32
);
12131 tcg_temp_free_i32(fp32
);
12132 gen_store_fpr64(ctx
, fp64
, fd
);
12133 tcg_temp_free_i64(fp64
);
12137 check_cp1_64bitmode(ctx
);
12139 TCGv_i32 fp32
= tcg_temp_new_i32();
12140 TCGv_i64 fp64
= tcg_temp_new_i64();
12142 gen_load_fpr64(ctx
, fp64
, fs
);
12143 gen_helper_float_cvts_l(fp32
, cpu_env
, fp64
);
12144 tcg_temp_free_i64(fp64
);
12145 gen_store_fpr32(ctx
, fp32
, fd
);
12146 tcg_temp_free_i32(fp32
);
12150 check_cp1_64bitmode(ctx
);
12152 TCGv_i64 fp0
= tcg_temp_new_i64();
12154 gen_load_fpr64(ctx
, fp0
, fs
);
12155 gen_helper_float_cvtd_l(fp0
, cpu_env
, fp0
);
12156 gen_store_fpr64(ctx
, fp0
, fd
);
12157 tcg_temp_free_i64(fp0
);
12160 case OPC_CVT_PS_PW
:
12163 TCGv_i64 fp0
= tcg_temp_new_i64();
12165 gen_load_fpr64(ctx
, fp0
, fs
);
12166 gen_helper_float_cvtps_pw(fp0
, cpu_env
, fp0
);
12167 gen_store_fpr64(ctx
, fp0
, fd
);
12168 tcg_temp_free_i64(fp0
);
12174 TCGv_i64 fp0
= tcg_temp_new_i64();
12175 TCGv_i64 fp1
= tcg_temp_new_i64();
12177 gen_load_fpr64(ctx
, fp0
, fs
);
12178 gen_load_fpr64(ctx
, fp1
, ft
);
12179 gen_helper_float_add_ps(fp0
, cpu_env
, fp0
, fp1
);
12180 tcg_temp_free_i64(fp1
);
12181 gen_store_fpr64(ctx
, fp0
, fd
);
12182 tcg_temp_free_i64(fp0
);
12188 TCGv_i64 fp0
= tcg_temp_new_i64();
12189 TCGv_i64 fp1
= tcg_temp_new_i64();
12191 gen_load_fpr64(ctx
, fp0
, fs
);
12192 gen_load_fpr64(ctx
, fp1
, ft
);
12193 gen_helper_float_sub_ps(fp0
, cpu_env
, fp0
, fp1
);
12194 tcg_temp_free_i64(fp1
);
12195 gen_store_fpr64(ctx
, fp0
, fd
);
12196 tcg_temp_free_i64(fp0
);
12202 TCGv_i64 fp0
= tcg_temp_new_i64();
12203 TCGv_i64 fp1
= tcg_temp_new_i64();
12205 gen_load_fpr64(ctx
, fp0
, fs
);
12206 gen_load_fpr64(ctx
, fp1
, ft
);
12207 gen_helper_float_mul_ps(fp0
, cpu_env
, fp0
, fp1
);
12208 tcg_temp_free_i64(fp1
);
12209 gen_store_fpr64(ctx
, fp0
, fd
);
12210 tcg_temp_free_i64(fp0
);
12216 TCGv_i64 fp0
= tcg_temp_new_i64();
12218 gen_load_fpr64(ctx
, fp0
, fs
);
12219 gen_helper_float_abs_ps(fp0
, fp0
);
12220 gen_store_fpr64(ctx
, fp0
, fd
);
12221 tcg_temp_free_i64(fp0
);
12227 TCGv_i64 fp0
= tcg_temp_new_i64();
12229 gen_load_fpr64(ctx
, fp0
, fs
);
12230 gen_store_fpr64(ctx
, fp0
, fd
);
12231 tcg_temp_free_i64(fp0
);
12237 TCGv_i64 fp0
= tcg_temp_new_i64();
12239 gen_load_fpr64(ctx
, fp0
, fs
);
12240 gen_helper_float_chs_ps(fp0
, fp0
);
12241 gen_store_fpr64(ctx
, fp0
, fd
);
12242 tcg_temp_free_i64(fp0
);
12247 gen_movcf_ps(ctx
, fs
, fd
, (ft
>> 2) & 0x7, ft
& 0x1);
12252 TCGLabel
*l1
= gen_new_label();
12256 tcg_gen_brcondi_tl(TCG_COND_NE
, cpu_gpr
[ft
], 0, l1
);
12258 fp0
= tcg_temp_new_i64();
12259 gen_load_fpr64(ctx
, fp0
, fs
);
12260 gen_store_fpr64(ctx
, fp0
, fd
);
12261 tcg_temp_free_i64(fp0
);
12268 TCGLabel
*l1
= gen_new_label();
12272 tcg_gen_brcondi_tl(TCG_COND_EQ
, cpu_gpr
[ft
], 0, l1
);
12273 fp0
= tcg_temp_new_i64();
12274 gen_load_fpr64(ctx
, fp0
, fs
);
12275 gen_store_fpr64(ctx
, fp0
, fd
);
12276 tcg_temp_free_i64(fp0
);
12284 TCGv_i64 fp0
= tcg_temp_new_i64();
12285 TCGv_i64 fp1
= tcg_temp_new_i64();
12287 gen_load_fpr64(ctx
, fp0
, ft
);
12288 gen_load_fpr64(ctx
, fp1
, fs
);
12289 gen_helper_float_addr_ps(fp0
, cpu_env
, fp0
, fp1
);
12290 tcg_temp_free_i64(fp1
);
12291 gen_store_fpr64(ctx
, fp0
, fd
);
12292 tcg_temp_free_i64(fp0
);
12298 TCGv_i64 fp0
= tcg_temp_new_i64();
12299 TCGv_i64 fp1
= tcg_temp_new_i64();
12301 gen_load_fpr64(ctx
, fp0
, ft
);
12302 gen_load_fpr64(ctx
, fp1
, fs
);
12303 gen_helper_float_mulr_ps(fp0
, cpu_env
, fp0
, fp1
);
12304 tcg_temp_free_i64(fp1
);
12305 gen_store_fpr64(ctx
, fp0
, fd
);
12306 tcg_temp_free_i64(fp0
);
12309 case OPC_RECIP2_PS
:
12312 TCGv_i64 fp0
= tcg_temp_new_i64();
12313 TCGv_i64 fp1
= tcg_temp_new_i64();
12315 gen_load_fpr64(ctx
, fp0
, fs
);
12316 gen_load_fpr64(ctx
, fp1
, ft
);
12317 gen_helper_float_recip2_ps(fp0
, cpu_env
, fp0
, fp1
);
12318 tcg_temp_free_i64(fp1
);
12319 gen_store_fpr64(ctx
, fp0
, fd
);
12320 tcg_temp_free_i64(fp0
);
12323 case OPC_RECIP1_PS
:
12326 TCGv_i64 fp0
= tcg_temp_new_i64();
12328 gen_load_fpr64(ctx
, fp0
, fs
);
12329 gen_helper_float_recip1_ps(fp0
, cpu_env
, fp0
);
12330 gen_store_fpr64(ctx
, fp0
, fd
);
12331 tcg_temp_free_i64(fp0
);
12334 case OPC_RSQRT1_PS
:
12337 TCGv_i64 fp0
= tcg_temp_new_i64();
12339 gen_load_fpr64(ctx
, fp0
, fs
);
12340 gen_helper_float_rsqrt1_ps(fp0
, cpu_env
, fp0
);
12341 gen_store_fpr64(ctx
, fp0
, fd
);
12342 tcg_temp_free_i64(fp0
);
12345 case OPC_RSQRT2_PS
:
12348 TCGv_i64 fp0
= tcg_temp_new_i64();
12349 TCGv_i64 fp1
= tcg_temp_new_i64();
12351 gen_load_fpr64(ctx
, fp0
, fs
);
12352 gen_load_fpr64(ctx
, fp1
, ft
);
12353 gen_helper_float_rsqrt2_ps(fp0
, cpu_env
, fp0
, fp1
);
12354 tcg_temp_free_i64(fp1
);
12355 gen_store_fpr64(ctx
, fp0
, fd
);
12356 tcg_temp_free_i64(fp0
);
12360 check_cp1_64bitmode(ctx
);
12362 TCGv_i32 fp0
= tcg_temp_new_i32();
12364 gen_load_fpr32h(ctx
, fp0
, fs
);
12365 gen_helper_float_cvts_pu(fp0
, cpu_env
, fp0
);
12366 gen_store_fpr32(ctx
, fp0
, fd
);
12367 tcg_temp_free_i32(fp0
);
12370 case OPC_CVT_PW_PS
:
12373 TCGv_i64 fp0
= tcg_temp_new_i64();
12375 gen_load_fpr64(ctx
, fp0
, fs
);
12376 gen_helper_float_cvtpw_ps(fp0
, cpu_env
, fp0
);
12377 gen_store_fpr64(ctx
, fp0
, fd
);
12378 tcg_temp_free_i64(fp0
);
12382 check_cp1_64bitmode(ctx
);
12384 TCGv_i32 fp0
= tcg_temp_new_i32();
12386 gen_load_fpr32(ctx
, fp0
, fs
);
12387 gen_helper_float_cvts_pl(fp0
, cpu_env
, fp0
);
12388 gen_store_fpr32(ctx
, fp0
, fd
);
12389 tcg_temp_free_i32(fp0
);
12395 TCGv_i32 fp0
= tcg_temp_new_i32();
12396 TCGv_i32 fp1
= tcg_temp_new_i32();
12398 gen_load_fpr32(ctx
, fp0
, fs
);
12399 gen_load_fpr32(ctx
, fp1
, ft
);
12400 gen_store_fpr32h(ctx
, fp0
, fd
);
12401 gen_store_fpr32(ctx
, fp1
, fd
);
12402 tcg_temp_free_i32(fp0
);
12403 tcg_temp_free_i32(fp1
);
12409 TCGv_i32 fp0
= tcg_temp_new_i32();
12410 TCGv_i32 fp1
= tcg_temp_new_i32();
12412 gen_load_fpr32(ctx
, fp0
, fs
);
12413 gen_load_fpr32h(ctx
, fp1
, ft
);
12414 gen_store_fpr32(ctx
, fp1
, fd
);
12415 gen_store_fpr32h(ctx
, fp0
, fd
);
12416 tcg_temp_free_i32(fp0
);
12417 tcg_temp_free_i32(fp1
);
12423 TCGv_i32 fp0
= tcg_temp_new_i32();
12424 TCGv_i32 fp1
= tcg_temp_new_i32();
12426 gen_load_fpr32h(ctx
, fp0
, fs
);
12427 gen_load_fpr32(ctx
, fp1
, ft
);
12428 gen_store_fpr32(ctx
, fp1
, fd
);
12429 gen_store_fpr32h(ctx
, fp0
, fd
);
12430 tcg_temp_free_i32(fp0
);
12431 tcg_temp_free_i32(fp1
);
12437 TCGv_i32 fp0
= tcg_temp_new_i32();
12438 TCGv_i32 fp1
= tcg_temp_new_i32();
12440 gen_load_fpr32h(ctx
, fp0
, fs
);
12441 gen_load_fpr32h(ctx
, fp1
, ft
);
12442 gen_store_fpr32(ctx
, fp1
, fd
);
12443 gen_store_fpr32h(ctx
, fp0
, fd
);
12444 tcg_temp_free_i32(fp0
);
12445 tcg_temp_free_i32(fp1
);
12449 case OPC_CMP_UN_PS
:
12450 case OPC_CMP_EQ_PS
:
12451 case OPC_CMP_UEQ_PS
:
12452 case OPC_CMP_OLT_PS
:
12453 case OPC_CMP_ULT_PS
:
12454 case OPC_CMP_OLE_PS
:
12455 case OPC_CMP_ULE_PS
:
12456 case OPC_CMP_SF_PS
:
12457 case OPC_CMP_NGLE_PS
:
12458 case OPC_CMP_SEQ_PS
:
12459 case OPC_CMP_NGL_PS
:
12460 case OPC_CMP_LT_PS
:
12461 case OPC_CMP_NGE_PS
:
12462 case OPC_CMP_LE_PS
:
12463 case OPC_CMP_NGT_PS
:
12464 if (ctx
->opcode
& (1 << 6)) {
12465 gen_cmpabs_ps(ctx
, func
- 48, ft
, fs
, cc
);
12467 gen_cmp_ps(ctx
, func
- 48, ft
, fs
, cc
);
12471 MIPS_INVAL("farith");
12472 gen_reserved_instruction(ctx
);
12477 /* Coprocessor 3 (FPU) */
12478 static void gen_flt3_ldst(DisasContext
*ctx
, uint32_t opc
,
12479 int fd
, int fs
, int base
, int index
)
12481 TCGv t0
= tcg_temp_new();
12484 gen_load_gpr(t0
, index
);
12485 } else if (index
== 0) {
12486 gen_load_gpr(t0
, base
);
12488 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[index
]);
12491 * Don't do NOP if destination is zero: we must perform the actual
12498 TCGv_i32 fp0
= tcg_temp_new_i32();
12500 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
12501 tcg_gen_trunc_tl_i32(fp0
, t0
);
12502 gen_store_fpr32(ctx
, fp0
, fd
);
12503 tcg_temp_free_i32(fp0
);
12508 check_cp1_registers(ctx
, fd
);
12510 TCGv_i64 fp0
= tcg_temp_new_i64();
12511 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12512 gen_store_fpr64(ctx
, fp0
, fd
);
12513 tcg_temp_free_i64(fp0
);
12517 check_cp1_64bitmode(ctx
);
12518 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12520 TCGv_i64 fp0
= tcg_temp_new_i64();
12522 tcg_gen_qemu_ld_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12523 gen_store_fpr64(ctx
, fp0
, fd
);
12524 tcg_temp_free_i64(fp0
);
12530 TCGv_i32 fp0
= tcg_temp_new_i32();
12531 gen_load_fpr32(ctx
, fp0
, fs
);
12532 tcg_gen_qemu_st_i32(fp0
, t0
, ctx
->mem_idx
, MO_TEUL
);
12533 tcg_temp_free_i32(fp0
);
12538 check_cp1_registers(ctx
, fs
);
12540 TCGv_i64 fp0
= tcg_temp_new_i64();
12541 gen_load_fpr64(ctx
, fp0
, fs
);
12542 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12543 tcg_temp_free_i64(fp0
);
12547 check_cp1_64bitmode(ctx
);
12548 tcg_gen_andi_tl(t0
, t0
, ~0x7);
12550 TCGv_i64 fp0
= tcg_temp_new_i64();
12551 gen_load_fpr64(ctx
, fp0
, fs
);
12552 tcg_gen_qemu_st_i64(fp0
, t0
, ctx
->mem_idx
, MO_TEQ
);
12553 tcg_temp_free_i64(fp0
);
12560 static void gen_flt3_arith(DisasContext
*ctx
, uint32_t opc
,
12561 int fd
, int fr
, int fs
, int ft
)
12567 TCGv t0
= tcg_temp_local_new();
12568 TCGv_i32 fp
= tcg_temp_new_i32();
12569 TCGv_i32 fph
= tcg_temp_new_i32();
12570 TCGLabel
*l1
= gen_new_label();
12571 TCGLabel
*l2
= gen_new_label();
12573 gen_load_gpr(t0
, fr
);
12574 tcg_gen_andi_tl(t0
, t0
, 0x7);
12576 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 0, l1
);
12577 gen_load_fpr32(ctx
, fp
, fs
);
12578 gen_load_fpr32h(ctx
, fph
, fs
);
12579 gen_store_fpr32(ctx
, fp
, fd
);
12580 gen_store_fpr32h(ctx
, fph
, fd
);
12583 tcg_gen_brcondi_tl(TCG_COND_NE
, t0
, 4, l2
);
12585 #ifdef TARGET_WORDS_BIGENDIAN
12586 gen_load_fpr32(ctx
, fp
, fs
);
12587 gen_load_fpr32h(ctx
, fph
, ft
);
12588 gen_store_fpr32h(ctx
, fp
, fd
);
12589 gen_store_fpr32(ctx
, fph
, fd
);
12591 gen_load_fpr32h(ctx
, fph
, fs
);
12592 gen_load_fpr32(ctx
, fp
, ft
);
12593 gen_store_fpr32(ctx
, fph
, fd
);
12594 gen_store_fpr32h(ctx
, fp
, fd
);
12597 tcg_temp_free_i32(fp
);
12598 tcg_temp_free_i32(fph
);
12604 TCGv_i32 fp0
= tcg_temp_new_i32();
12605 TCGv_i32 fp1
= tcg_temp_new_i32();
12606 TCGv_i32 fp2
= tcg_temp_new_i32();
12608 gen_load_fpr32(ctx
, fp0
, fs
);
12609 gen_load_fpr32(ctx
, fp1
, ft
);
12610 gen_load_fpr32(ctx
, fp2
, fr
);
12611 gen_helper_float_madd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12612 tcg_temp_free_i32(fp0
);
12613 tcg_temp_free_i32(fp1
);
12614 gen_store_fpr32(ctx
, fp2
, fd
);
12615 tcg_temp_free_i32(fp2
);
12620 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12622 TCGv_i64 fp0
= tcg_temp_new_i64();
12623 TCGv_i64 fp1
= tcg_temp_new_i64();
12624 TCGv_i64 fp2
= tcg_temp_new_i64();
12626 gen_load_fpr64(ctx
, fp0
, fs
);
12627 gen_load_fpr64(ctx
, fp1
, ft
);
12628 gen_load_fpr64(ctx
, fp2
, fr
);
12629 gen_helper_float_madd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12630 tcg_temp_free_i64(fp0
);
12631 tcg_temp_free_i64(fp1
);
12632 gen_store_fpr64(ctx
, fp2
, fd
);
12633 tcg_temp_free_i64(fp2
);
12639 TCGv_i64 fp0
= tcg_temp_new_i64();
12640 TCGv_i64 fp1
= tcg_temp_new_i64();
12641 TCGv_i64 fp2
= tcg_temp_new_i64();
12643 gen_load_fpr64(ctx
, fp0
, fs
);
12644 gen_load_fpr64(ctx
, fp1
, ft
);
12645 gen_load_fpr64(ctx
, fp2
, fr
);
12646 gen_helper_float_madd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12647 tcg_temp_free_i64(fp0
);
12648 tcg_temp_free_i64(fp1
);
12649 gen_store_fpr64(ctx
, fp2
, fd
);
12650 tcg_temp_free_i64(fp2
);
12656 TCGv_i32 fp0
= tcg_temp_new_i32();
12657 TCGv_i32 fp1
= tcg_temp_new_i32();
12658 TCGv_i32 fp2
= tcg_temp_new_i32();
12660 gen_load_fpr32(ctx
, fp0
, fs
);
12661 gen_load_fpr32(ctx
, fp1
, ft
);
12662 gen_load_fpr32(ctx
, fp2
, fr
);
12663 gen_helper_float_msub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12664 tcg_temp_free_i32(fp0
);
12665 tcg_temp_free_i32(fp1
);
12666 gen_store_fpr32(ctx
, fp2
, fd
);
12667 tcg_temp_free_i32(fp2
);
12672 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12674 TCGv_i64 fp0
= tcg_temp_new_i64();
12675 TCGv_i64 fp1
= tcg_temp_new_i64();
12676 TCGv_i64 fp2
= tcg_temp_new_i64();
12678 gen_load_fpr64(ctx
, fp0
, fs
);
12679 gen_load_fpr64(ctx
, fp1
, ft
);
12680 gen_load_fpr64(ctx
, fp2
, fr
);
12681 gen_helper_float_msub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12682 tcg_temp_free_i64(fp0
);
12683 tcg_temp_free_i64(fp1
);
12684 gen_store_fpr64(ctx
, fp2
, fd
);
12685 tcg_temp_free_i64(fp2
);
12691 TCGv_i64 fp0
= tcg_temp_new_i64();
12692 TCGv_i64 fp1
= tcg_temp_new_i64();
12693 TCGv_i64 fp2
= tcg_temp_new_i64();
12695 gen_load_fpr64(ctx
, fp0
, fs
);
12696 gen_load_fpr64(ctx
, fp1
, ft
);
12697 gen_load_fpr64(ctx
, fp2
, fr
);
12698 gen_helper_float_msub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12699 tcg_temp_free_i64(fp0
);
12700 tcg_temp_free_i64(fp1
);
12701 gen_store_fpr64(ctx
, fp2
, fd
);
12702 tcg_temp_free_i64(fp2
);
12708 TCGv_i32 fp0
= tcg_temp_new_i32();
12709 TCGv_i32 fp1
= tcg_temp_new_i32();
12710 TCGv_i32 fp2
= tcg_temp_new_i32();
12712 gen_load_fpr32(ctx
, fp0
, fs
);
12713 gen_load_fpr32(ctx
, fp1
, ft
);
12714 gen_load_fpr32(ctx
, fp2
, fr
);
12715 gen_helper_float_nmadd_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12716 tcg_temp_free_i32(fp0
);
12717 tcg_temp_free_i32(fp1
);
12718 gen_store_fpr32(ctx
, fp2
, fd
);
12719 tcg_temp_free_i32(fp2
);
12724 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12726 TCGv_i64 fp0
= tcg_temp_new_i64();
12727 TCGv_i64 fp1
= tcg_temp_new_i64();
12728 TCGv_i64 fp2
= tcg_temp_new_i64();
12730 gen_load_fpr64(ctx
, fp0
, fs
);
12731 gen_load_fpr64(ctx
, fp1
, ft
);
12732 gen_load_fpr64(ctx
, fp2
, fr
);
12733 gen_helper_float_nmadd_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12734 tcg_temp_free_i64(fp0
);
12735 tcg_temp_free_i64(fp1
);
12736 gen_store_fpr64(ctx
, fp2
, fd
);
12737 tcg_temp_free_i64(fp2
);
12743 TCGv_i64 fp0
= tcg_temp_new_i64();
12744 TCGv_i64 fp1
= tcg_temp_new_i64();
12745 TCGv_i64 fp2
= tcg_temp_new_i64();
12747 gen_load_fpr64(ctx
, fp0
, fs
);
12748 gen_load_fpr64(ctx
, fp1
, ft
);
12749 gen_load_fpr64(ctx
, fp2
, fr
);
12750 gen_helper_float_nmadd_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12751 tcg_temp_free_i64(fp0
);
12752 tcg_temp_free_i64(fp1
);
12753 gen_store_fpr64(ctx
, fp2
, fd
);
12754 tcg_temp_free_i64(fp2
);
12760 TCGv_i32 fp0
= tcg_temp_new_i32();
12761 TCGv_i32 fp1
= tcg_temp_new_i32();
12762 TCGv_i32 fp2
= tcg_temp_new_i32();
12764 gen_load_fpr32(ctx
, fp0
, fs
);
12765 gen_load_fpr32(ctx
, fp1
, ft
);
12766 gen_load_fpr32(ctx
, fp2
, fr
);
12767 gen_helper_float_nmsub_s(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12768 tcg_temp_free_i32(fp0
);
12769 tcg_temp_free_i32(fp1
);
12770 gen_store_fpr32(ctx
, fp2
, fd
);
12771 tcg_temp_free_i32(fp2
);
12776 check_cp1_registers(ctx
, fd
| fs
| ft
| fr
);
12778 TCGv_i64 fp0
= tcg_temp_new_i64();
12779 TCGv_i64 fp1
= tcg_temp_new_i64();
12780 TCGv_i64 fp2
= tcg_temp_new_i64();
12782 gen_load_fpr64(ctx
, fp0
, fs
);
12783 gen_load_fpr64(ctx
, fp1
, ft
);
12784 gen_load_fpr64(ctx
, fp2
, fr
);
12785 gen_helper_float_nmsub_d(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12786 tcg_temp_free_i64(fp0
);
12787 tcg_temp_free_i64(fp1
);
12788 gen_store_fpr64(ctx
, fp2
, fd
);
12789 tcg_temp_free_i64(fp2
);
12795 TCGv_i64 fp0
= tcg_temp_new_i64();
12796 TCGv_i64 fp1
= tcg_temp_new_i64();
12797 TCGv_i64 fp2
= tcg_temp_new_i64();
12799 gen_load_fpr64(ctx
, fp0
, fs
);
12800 gen_load_fpr64(ctx
, fp1
, ft
);
12801 gen_load_fpr64(ctx
, fp2
, fr
);
12802 gen_helper_float_nmsub_ps(fp2
, cpu_env
, fp0
, fp1
, fp2
);
12803 tcg_temp_free_i64(fp0
);
12804 tcg_temp_free_i64(fp1
);
12805 gen_store_fpr64(ctx
, fp2
, fd
);
12806 tcg_temp_free_i64(fp2
);
12810 MIPS_INVAL("flt3_arith");
12811 gen_reserved_instruction(ctx
);
12816 static void gen_rdhwr(DisasContext
*ctx
, int rt
, int rd
, int sel
)
12820 #if !defined(CONFIG_USER_ONLY)
12822 * The Linux kernel will emulate rdhwr if it's not supported natively.
12823 * Therefore only check the ISA in system mode.
12825 check_insn(ctx
, ISA_MIPS_R2
);
12827 t0
= tcg_temp_new();
12831 gen_helper_rdhwr_cpunum(t0
, cpu_env
);
12832 gen_store_gpr(t0
, rt
);
12835 gen_helper_rdhwr_synci_step(t0
, cpu_env
);
12836 gen_store_gpr(t0
, rt
);
12839 if (tb_cflags(ctx
->base
.tb
) & CF_USE_ICOUNT
) {
12842 gen_helper_rdhwr_cc(t0
, cpu_env
);
12843 gen_store_gpr(t0
, rt
);
12845 * Break the TB to be able to take timer interrupts immediately
12846 * after reading count. DISAS_STOP isn't sufficient, we need to ensure
12847 * we break completely out of translated code.
12849 gen_save_pc(ctx
->base
.pc_next
+ 4);
12850 ctx
->base
.is_jmp
= DISAS_EXIT
;
12853 gen_helper_rdhwr_ccres(t0
, cpu_env
);
12854 gen_store_gpr(t0
, rt
);
12857 check_insn(ctx
, ISA_MIPS_R6
);
12860 * Performance counter registers are not implemented other than
12861 * control register 0.
12863 generate_exception(ctx
, EXCP_RI
);
12865 gen_helper_rdhwr_performance(t0
, cpu_env
);
12866 gen_store_gpr(t0
, rt
);
12869 check_insn(ctx
, ISA_MIPS_R6
);
12870 gen_helper_rdhwr_xnp(t0
, cpu_env
);
12871 gen_store_gpr(t0
, rt
);
12874 #if defined(CONFIG_USER_ONLY)
12875 tcg_gen_ld_tl(t0
, cpu_env
,
12876 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12877 gen_store_gpr(t0
, rt
);
12880 if ((ctx
->hflags
& MIPS_HFLAG_CP0
) ||
12881 (ctx
->hflags
& MIPS_HFLAG_HWRENA_ULR
)) {
12882 tcg_gen_ld_tl(t0
, cpu_env
,
12883 offsetof(CPUMIPSState
, active_tc
.CP0_UserLocal
));
12884 gen_store_gpr(t0
, rt
);
12886 gen_reserved_instruction(ctx
);
12890 default: /* Invalid */
12891 MIPS_INVAL("rdhwr");
12892 gen_reserved_instruction(ctx
);
12898 static inline void clear_branch_hflags(DisasContext
*ctx
)
12900 ctx
->hflags
&= ~MIPS_HFLAG_BMASK
;
12901 if (ctx
->base
.is_jmp
== DISAS_NEXT
) {
12902 save_cpu_state(ctx
, 0);
12905 * It is not safe to save ctx->hflags as hflags may be changed
12906 * in execution time by the instruction in delay / forbidden slot.
12908 tcg_gen_andi_i32(hflags
, hflags
, ~MIPS_HFLAG_BMASK
);
12912 static void gen_branch(DisasContext
*ctx
, int insn_bytes
)
12914 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12915 int proc_hflags
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
12916 /* Branches completion */
12917 clear_branch_hflags(ctx
);
12918 ctx
->base
.is_jmp
= DISAS_NORETURN
;
12919 /* FIXME: Need to clear can_do_io. */
12920 switch (proc_hflags
& MIPS_HFLAG_BMASK_BASE
) {
12921 case MIPS_HFLAG_FBNSLOT
:
12922 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ insn_bytes
);
12925 /* unconditional branch */
12926 if (proc_hflags
& MIPS_HFLAG_BX
) {
12927 tcg_gen_xori_i32(hflags
, hflags
, MIPS_HFLAG_M16
);
12929 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12931 case MIPS_HFLAG_BL
:
12932 /* blikely taken case */
12933 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12935 case MIPS_HFLAG_BC
:
12936 /* Conditional branch */
12938 TCGLabel
*l1
= gen_new_label();
12940 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
12941 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ insn_bytes
);
12943 gen_goto_tb(ctx
, 0, ctx
->btarget
);
12946 case MIPS_HFLAG_BR
:
12947 /* unconditional branch to register */
12948 if (ctx
->insn_flags
& (ASE_MIPS16
| ASE_MICROMIPS
)) {
12949 TCGv t0
= tcg_temp_new();
12950 TCGv_i32 t1
= tcg_temp_new_i32();
12952 tcg_gen_andi_tl(t0
, btarget
, 0x1);
12953 tcg_gen_trunc_tl_i32(t1
, t0
);
12955 tcg_gen_andi_i32(hflags
, hflags
, ~(uint32_t)MIPS_HFLAG_M16
);
12956 tcg_gen_shli_i32(t1
, t1
, MIPS_HFLAG_M16_SHIFT
);
12957 tcg_gen_or_i32(hflags
, hflags
, t1
);
12958 tcg_temp_free_i32(t1
);
12960 tcg_gen_andi_tl(cpu_PC
, btarget
, ~(target_ulong
)0x1);
12962 tcg_gen_mov_tl(cpu_PC
, btarget
);
12964 if (ctx
->base
.singlestep_enabled
) {
12965 save_cpu_state(ctx
, 0);
12966 gen_helper_raise_exception_debug(cpu_env
);
12968 tcg_gen_lookup_and_goto_ptr();
12971 fprintf(stderr
, "unknown branch 0x%x\n", proc_hflags
);
12977 /* Compact Branches */
12978 static void gen_compute_compact_branch(DisasContext
*ctx
, uint32_t opc
,
12979 int rs
, int rt
, int32_t offset
)
12981 int bcond_compute
= 0;
12982 TCGv t0
= tcg_temp_new();
12983 TCGv t1
= tcg_temp_new();
12984 int m16_lowbit
= (ctx
->hflags
& MIPS_HFLAG_M16
) != 0;
12986 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
12987 #ifdef MIPS_DEBUG_DISAS
12988 LOG_DISAS("Branch in delay / forbidden slot at PC 0x" TARGET_FMT_lx
12989 "\n", ctx
->base
.pc_next
);
12991 gen_reserved_instruction(ctx
);
12995 /* Load needed operands and calculate btarget */
12997 /* compact branch */
12998 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
12999 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
13000 gen_load_gpr(t0
, rs
);
13001 gen_load_gpr(t1
, rt
);
13003 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
13004 if (rs
<= rt
&& rs
== 0) {
13005 /* OPC_BEQZALC, OPC_BNEZALC */
13006 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
13009 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
13010 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
13011 gen_load_gpr(t0
, rs
);
13012 gen_load_gpr(t1
, rt
);
13014 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
13016 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
13017 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
13018 if (rs
== 0 || rs
== rt
) {
13019 /* OPC_BLEZALC, OPC_BGEZALC */
13020 /* OPC_BGTZALC, OPC_BLTZALC */
13021 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
13023 gen_load_gpr(t0
, rs
);
13024 gen_load_gpr(t1
, rt
);
13026 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
13030 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
13035 /* OPC_BEQZC, OPC_BNEZC */
13036 gen_load_gpr(t0
, rs
);
13038 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
13040 /* OPC_JIC, OPC_JIALC */
13041 TCGv tbase
= tcg_temp_new();
13042 TCGv toffset
= tcg_temp_new();
13044 gen_load_gpr(tbase
, rt
);
13045 tcg_gen_movi_tl(toffset
, offset
);
13046 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
13047 tcg_temp_free(tbase
);
13048 tcg_temp_free(toffset
);
13052 MIPS_INVAL("Compact branch/jump");
13053 gen_reserved_instruction(ctx
);
13057 if (bcond_compute
== 0) {
13058 /* Uncoditional compact branch */
13061 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
13064 ctx
->hflags
|= MIPS_HFLAG_BR
;
13067 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4 + m16_lowbit
);
13070 ctx
->hflags
|= MIPS_HFLAG_B
;
13073 MIPS_INVAL("Compact branch/jump");
13074 gen_reserved_instruction(ctx
);
13078 /* Generating branch here as compact branches don't have delay slot */
13079 gen_branch(ctx
, 4);
13081 /* Conditional compact branch */
13082 TCGLabel
*fs
= gen_new_label();
13083 save_cpu_state(ctx
, 0);
13086 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC */
13087 if (rs
== 0 && rt
!= 0) {
13089 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
13090 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
13092 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
13095 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
13098 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC */
13099 if (rs
== 0 && rt
!= 0) {
13101 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
13102 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
13104 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
13107 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
13110 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC */
13111 if (rs
== 0 && rt
!= 0) {
13113 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
13114 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
13116 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
13119 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
13122 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC */
13123 if (rs
== 0 && rt
!= 0) {
13125 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
13126 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
13128 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
13131 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
13134 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC */
13135 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
13137 /* OPC_BOVC, OPC_BNVC */
13138 TCGv t2
= tcg_temp_new();
13139 TCGv t3
= tcg_temp_new();
13140 TCGv t4
= tcg_temp_new();
13141 TCGv input_overflow
= tcg_temp_new();
13143 gen_load_gpr(t0
, rs
);
13144 gen_load_gpr(t1
, rt
);
13145 tcg_gen_ext32s_tl(t2
, t0
);
13146 tcg_gen_setcond_tl(TCG_COND_NE
, input_overflow
, t2
, t0
);
13147 tcg_gen_ext32s_tl(t3
, t1
);
13148 tcg_gen_setcond_tl(TCG_COND_NE
, t4
, t3
, t1
);
13149 tcg_gen_or_tl(input_overflow
, input_overflow
, t4
);
13151 tcg_gen_add_tl(t4
, t2
, t3
);
13152 tcg_gen_ext32s_tl(t4
, t4
);
13153 tcg_gen_xor_tl(t2
, t2
, t3
);
13154 tcg_gen_xor_tl(t3
, t4
, t3
);
13155 tcg_gen_andc_tl(t2
, t3
, t2
);
13156 tcg_gen_setcondi_tl(TCG_COND_LT
, t4
, t2
, 0);
13157 tcg_gen_or_tl(t4
, t4
, input_overflow
);
13158 if (opc
== OPC_BOVC
) {
13160 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t4
, 0, fs
);
13163 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t4
, 0, fs
);
13165 tcg_temp_free(input_overflow
);
13169 } else if (rs
< rt
&& rs
== 0) {
13170 /* OPC_BEQZALC, OPC_BNEZALC */
13171 if (opc
== OPC_BEQZALC
) {
13173 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t1
, 0, fs
);
13176 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t1
, 0, fs
);
13179 /* OPC_BEQC, OPC_BNEC */
13180 if (opc
== OPC_BEQC
) {
13182 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, t1
, fs
);
13185 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_NE
), t0
, t1
, fs
);
13190 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
13193 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_NE
), t0
, 0, fs
);
13196 MIPS_INVAL("Compact conditional branch/jump");
13197 gen_reserved_instruction(ctx
);
13201 /* Generating branch here as compact branches don't have delay slot */
13202 gen_goto_tb(ctx
, 1, ctx
->btarget
);
13205 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
13213 /* ISA extensions (ASEs) */
13214 /* MIPS16 extension to MIPS32 */
13216 /* MIPS16 major opcodes */
13218 M16_OPC_ADDIUSP
= 0x00,
13219 M16_OPC_ADDIUPC
= 0x01,
13221 M16_OPC_JAL
= 0x03,
13222 M16_OPC_BEQZ
= 0x04,
13223 M16_OPC_BNEQZ
= 0x05,
13224 M16_OPC_SHIFT
= 0x06,
13226 M16_OPC_RRIA
= 0x08,
13227 M16_OPC_ADDIU8
= 0x09,
13228 M16_OPC_SLTI
= 0x0a,
13229 M16_OPC_SLTIU
= 0x0b,
13232 M16_OPC_CMPI
= 0x0e,
13236 M16_OPC_LWSP
= 0x12,
13238 M16_OPC_LBU
= 0x14,
13239 M16_OPC_LHU
= 0x15,
13240 M16_OPC_LWPC
= 0x16,
13241 M16_OPC_LWU
= 0x17,
13244 M16_OPC_SWSP
= 0x1a,
13246 M16_OPC_RRR
= 0x1c,
13248 M16_OPC_EXTEND
= 0x1e,
13252 /* I8 funct field */
13271 /* RR funct field */
13305 /* I64 funct field */
13313 I64_DADDIUPC
= 0x6,
13317 /* RR ry field for CNVT */
13319 RR_RY_CNVT_ZEB
= 0x0,
13320 RR_RY_CNVT_ZEH
= 0x1,
13321 RR_RY_CNVT_ZEW
= 0x2,
13322 RR_RY_CNVT_SEB
= 0x4,
13323 RR_RY_CNVT_SEH
= 0x5,
13324 RR_RY_CNVT_SEW
= 0x6,
/*
 * Map a 3-bit MIPS16 register field onto the full 32-entry GPR number.
 * MIPS16 register 0..7 -> GPRs 16, 17, 2..7.
 */
static int xlat(int r)
{
    /* const: matches the sibling mmreg()/mmreg2() tables. */
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
13334 static void gen_mips16_save(DisasContext
*ctx
,
13335 int xsregs
, int aregs
,
13336 int do_ra
, int do_s0
, int do_s1
,
13339 TCGv t0
= tcg_temp_new();
13340 TCGv t1
= tcg_temp_new();
13341 TCGv t2
= tcg_temp_new();
13371 gen_reserved_instruction(ctx
);
13377 gen_base_offset_addr(ctx
, t0
, 29, 12);
13378 gen_load_gpr(t1
, 7);
13379 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13382 gen_base_offset_addr(ctx
, t0
, 29, 8);
13383 gen_load_gpr(t1
, 6);
13384 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13387 gen_base_offset_addr(ctx
, t0
, 29, 4);
13388 gen_load_gpr(t1
, 5);
13389 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13392 gen_base_offset_addr(ctx
, t0
, 29, 0);
13393 gen_load_gpr(t1
, 4);
13394 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
13397 gen_load_gpr(t0
, 29);
13399 #define DECR_AND_STORE(reg) do { \
13400 tcg_gen_movi_tl(t2, -4); \
13401 gen_op_addr_add(ctx, t0, t0, t2); \
13402 gen_load_gpr(t1, reg); \
13403 tcg_gen_qemu_st_tl(t1, t0, ctx->mem_idx, MO_TEUL); \
13407 DECR_AND_STORE(31);
13412 DECR_AND_STORE(30);
13415 DECR_AND_STORE(23);
13418 DECR_AND_STORE(22);
13421 DECR_AND_STORE(21);
13424 DECR_AND_STORE(20);
13427 DECR_AND_STORE(19);
13430 DECR_AND_STORE(18);
13434 DECR_AND_STORE(17);
13437 DECR_AND_STORE(16);
13467 gen_reserved_instruction(ctx
);
13483 #undef DECR_AND_STORE
13485 tcg_gen_movi_tl(t2
, -framesize
);
13486 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13492 static void gen_mips16_restore(DisasContext
*ctx
,
13493 int xsregs
, int aregs
,
13494 int do_ra
, int do_s0
, int do_s1
,
13498 TCGv t0
= tcg_temp_new();
13499 TCGv t1
= tcg_temp_new();
13500 TCGv t2
= tcg_temp_new();
13502 tcg_gen_movi_tl(t2
, framesize
);
13503 gen_op_addr_add(ctx
, t0
, cpu_gpr
[29], t2
);
13505 #define DECR_AND_LOAD(reg) do { \
13506 tcg_gen_movi_tl(t2, -4); \
13507 gen_op_addr_add(ctx, t0, t0, t2); \
13508 tcg_gen_qemu_ld_tl(t1, t0, ctx->mem_idx, MO_TESL); \
13509 gen_store_gpr(t1, reg); \
13573 gen_reserved_instruction(ctx
);
13589 #undef DECR_AND_LOAD
13591 tcg_gen_movi_tl(t2
, framesize
);
13592 gen_op_addr_add(ctx
, cpu_gpr
[29], cpu_gpr
[29], t2
);
13598 static void gen_addiupc(DisasContext
*ctx
, int rx
, int imm
,
13599 int is_64_bit
, int extended
)
13603 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13604 gen_reserved_instruction(ctx
);
13608 t0
= tcg_temp_new();
13610 tcg_gen_movi_tl(t0
, pc_relative_pc(ctx
));
13611 tcg_gen_addi_tl(cpu_gpr
[rx
], t0
, imm
);
13613 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
13619 static void gen_cache_operation(DisasContext
*ctx
, uint32_t op
, int base
,
13622 TCGv_i32 t0
= tcg_const_i32(op
);
13623 TCGv t1
= tcg_temp_new();
13624 gen_base_offset_addr(ctx
, t1
, base
, offset
);
13625 gen_helper_cache(cpu_env
, t1
, t0
);
13628 #if defined(TARGET_MIPS64)
13629 static void decode_i64_mips16(DisasContext
*ctx
,
13630 int ry
, int funct
, int16_t offset
,
13635 check_insn(ctx
, ISA_MIPS3
);
13636 check_mips_64(ctx
);
13637 offset
= extended
? offset
: offset
<< 3;
13638 gen_ld(ctx
, OPC_LD
, ry
, 29, offset
);
13641 check_insn(ctx
, ISA_MIPS3
);
13642 check_mips_64(ctx
);
13643 offset
= extended
? offset
: offset
<< 3;
13644 gen_st(ctx
, OPC_SD
, ry
, 29, offset
);
13647 check_insn(ctx
, ISA_MIPS3
);
13648 check_mips_64(ctx
);
13649 offset
= extended
? offset
: (ctx
->opcode
& 0xff) << 3;
13650 gen_st(ctx
, OPC_SD
, 31, 29, offset
);
13653 check_insn(ctx
, ISA_MIPS3
);
13654 check_mips_64(ctx
);
13655 offset
= extended
? offset
: ((int8_t)ctx
->opcode
) << 3;
13656 gen_arith_imm(ctx
, OPC_DADDIU
, 29, 29, offset
);
13659 check_insn(ctx
, ISA_MIPS3
);
13660 check_mips_64(ctx
);
13661 if (extended
&& (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
13662 gen_reserved_instruction(ctx
);
13664 offset
= extended
? offset
: offset
<< 3;
13665 gen_ld(ctx
, OPC_LDPC
, ry
, 0, offset
);
13669 check_insn(ctx
, ISA_MIPS3
);
13670 check_mips_64(ctx
);
13671 offset
= extended
? offset
: ((int8_t)(offset
<< 3)) >> 3;
13672 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, ry
, offset
);
13675 check_insn(ctx
, ISA_MIPS3
);
13676 check_mips_64(ctx
);
13677 offset
= extended
? offset
: offset
<< 2;
13678 gen_addiupc(ctx
, ry
, offset
, 1, extended
);
13681 check_insn(ctx
, ISA_MIPS3
);
13682 check_mips_64(ctx
);
13683 offset
= extended
? offset
: offset
<< 2;
13684 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, 29, offset
);
13690 static int decode_extended_mips16_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
13692 int extend
= translator_lduw(env
, ctx
->base
.pc_next
+ 2);
13693 int op
, rx
, ry
, funct
, sa
;
13694 int16_t imm
, offset
;
13696 ctx
->opcode
= (ctx
->opcode
<< 16) | extend
;
13697 op
= (ctx
->opcode
>> 11) & 0x1f;
13698 sa
= (ctx
->opcode
>> 22) & 0x1f;
13699 funct
= (ctx
->opcode
>> 8) & 0x7;
13700 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13701 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13702 offset
= imm
= (int16_t) (((ctx
->opcode
>> 16) & 0x1f) << 11
13703 | ((ctx
->opcode
>> 21) & 0x3f) << 5
13704 | (ctx
->opcode
& 0x1f));
13707 * The extended opcodes cleverly reuse the opcodes from their 16-bit
13711 case M16_OPC_ADDIUSP
:
13712 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13714 case M16_OPC_ADDIUPC
:
13715 gen_addiupc(ctx
, rx
, imm
, 0, 1);
13718 gen_compute_branch(ctx
, OPC_BEQ
, 4, 0, 0, offset
<< 1, 0);
13719 /* No delay slot, so just process as a normal instruction */
13722 gen_compute_branch(ctx
, OPC_BEQ
, 4, rx
, 0, offset
<< 1, 0);
13723 /* No delay slot, so just process as a normal instruction */
13725 case M16_OPC_BNEQZ
:
13726 gen_compute_branch(ctx
, OPC_BNE
, 4, rx
, 0, offset
<< 1, 0);
13727 /* No delay slot, so just process as a normal instruction */
13729 case M16_OPC_SHIFT
:
13730 switch (ctx
->opcode
& 0x3) {
13732 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13735 #if defined(TARGET_MIPS64)
13736 check_mips_64(ctx
);
13737 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13739 gen_reserved_instruction(ctx
);
13743 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13746 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13750 #if defined(TARGET_MIPS64)
13752 check_insn(ctx
, ISA_MIPS3
);
13753 check_mips_64(ctx
);
13754 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
);
13758 imm
= ctx
->opcode
& 0xf;
13759 imm
= imm
| ((ctx
->opcode
>> 20) & 0x7f) << 4;
13760 imm
= imm
| ((ctx
->opcode
>> 16) & 0xf) << 11;
13761 imm
= (int16_t) (imm
<< 1) >> 1;
13762 if ((ctx
->opcode
>> 4) & 0x1) {
13763 #if defined(TARGET_MIPS64)
13764 check_mips_64(ctx
);
13765 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13767 gen_reserved_instruction(ctx
);
13770 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
13773 case M16_OPC_ADDIU8
:
13774 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
13777 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
13779 case M16_OPC_SLTIU
:
13780 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
13785 gen_compute_branch(ctx
, OPC_BEQ
, 4, 24, 0, offset
<< 1, 0);
13788 gen_compute_branch(ctx
, OPC_BNE
, 4, 24, 0, offset
<< 1, 0);
13791 gen_st(ctx
, OPC_SW
, 31, 29, imm
);
13794 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
);
13797 check_insn(ctx
, ISA_MIPS_R1
);
13799 int xsregs
= (ctx
->opcode
>> 24) & 0x7;
13800 int aregs
= (ctx
->opcode
>> 16) & 0xf;
13801 int do_ra
= (ctx
->opcode
>> 6) & 0x1;
13802 int do_s0
= (ctx
->opcode
>> 5) & 0x1;
13803 int do_s1
= (ctx
->opcode
>> 4) & 0x1;
13804 int framesize
= (((ctx
->opcode
>> 20) & 0xf) << 4
13805 | (ctx
->opcode
& 0xf)) << 3;
13807 if (ctx
->opcode
& (1 << 7)) {
13808 gen_mips16_save(ctx
, xsregs
, aregs
,
13809 do_ra
, do_s0
, do_s1
,
13812 gen_mips16_restore(ctx
, xsregs
, aregs
,
13813 do_ra
, do_s0
, do_s1
,
13819 gen_reserved_instruction(ctx
);
13824 tcg_gen_movi_tl(cpu_gpr
[rx
], (uint16_t) imm
);
13827 tcg_gen_xori_tl(cpu_gpr
[24], cpu_gpr
[rx
], (uint16_t) imm
);
13829 #if defined(TARGET_MIPS64)
13831 check_insn(ctx
, ISA_MIPS3
);
13832 check_mips_64(ctx
);
13833 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
);
13837 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
13840 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
);
13843 gen_ld(ctx
, OPC_LW
, rx
, 29, offset
);
13846 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
);
13849 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
13852 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
);
13855 gen_ld(ctx
, OPC_LWPC
, rx
, 0, offset
);
13857 #if defined(TARGET_MIPS64)
13859 check_insn(ctx
, ISA_MIPS3
);
13860 check_mips_64(ctx
);
13861 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
);
13865 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
13868 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
);
13871 gen_st(ctx
, OPC_SW
, rx
, 29, offset
);
13874 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
);
13876 #if defined(TARGET_MIPS64)
13878 decode_i64_mips16(ctx
, ry
, funct
, offset
, 1);
13882 gen_reserved_instruction(ctx
);
/*
 * True when an SDBBP with this code should be treated as a UHI
 * (Unified Hosting Interface) semihosting call.  Never taken in
 * user-only mode.
 */
static inline bool is_uhi(int sdbbp_code)
{
#ifdef CONFIG_USER_ONLY
    return false;
#else
    /* UHI uses SDBBP code 1. */
    return semihosting_enabled() && sdbbp_code == 1;
#endif
}
#ifdef CONFIG_USER_ONLY
/*
 * The above is_uhi() returns false in user-only builds, so this stub
 * should dead-code away any calls to the real semihosting helper.
 */
static inline void gen_helper_do_semihosting(void *env)
{
    g_assert_not_reached();
}
#endif
13906 static int decode_mips16_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
13910 int op
, cnvt_op
, op1
, offset
;
13914 op
= (ctx
->opcode
>> 11) & 0x1f;
13915 sa
= (ctx
->opcode
>> 2) & 0x7;
13916 sa
= sa
== 0 ? 8 : sa
;
13917 rx
= xlat((ctx
->opcode
>> 8) & 0x7);
13918 cnvt_op
= (ctx
->opcode
>> 5) & 0x7;
13919 ry
= xlat((ctx
->opcode
>> 5) & 0x7);
13920 op1
= offset
= ctx
->opcode
& 0x1f;
13925 case M16_OPC_ADDIUSP
:
13927 int16_t imm
= ((uint8_t) ctx
->opcode
) << 2;
13929 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 29, imm
);
13932 case M16_OPC_ADDIUPC
:
13933 gen_addiupc(ctx
, rx
, ((uint8_t) ctx
->opcode
) << 2, 0, 0);
13936 offset
= (ctx
->opcode
& 0x7ff) << 1;
13937 offset
= (int16_t)(offset
<< 4) >> 4;
13938 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0, offset
, 0);
13939 /* No delay slot, so just process as a normal instruction */
13942 offset
= translator_lduw(env
, ctx
->base
.pc_next
+ 2);
13943 offset
= (((ctx
->opcode
& 0x1f) << 21)
13944 | ((ctx
->opcode
>> 5) & 0x1f) << 16
13946 op
= ((ctx
->opcode
>> 10) & 0x1) ? OPC_JALX
: OPC_JAL
;
13947 gen_compute_branch(ctx
, op
, 4, rx
, ry
, offset
, 2);
13951 gen_compute_branch(ctx
, OPC_BEQ
, 2, rx
, 0,
13952 ((int8_t)ctx
->opcode
) << 1, 0);
13953 /* No delay slot, so just process as a normal instruction */
13955 case M16_OPC_BNEQZ
:
13956 gen_compute_branch(ctx
, OPC_BNE
, 2, rx
, 0,
13957 ((int8_t)ctx
->opcode
) << 1, 0);
13958 /* No delay slot, so just process as a normal instruction */
13960 case M16_OPC_SHIFT
:
13961 switch (ctx
->opcode
& 0x3) {
13963 gen_shift_imm(ctx
, OPC_SLL
, rx
, ry
, sa
);
13966 #if defined(TARGET_MIPS64)
13967 check_insn(ctx
, ISA_MIPS3
);
13968 check_mips_64(ctx
);
13969 gen_shift_imm(ctx
, OPC_DSLL
, rx
, ry
, sa
);
13971 gen_reserved_instruction(ctx
);
13975 gen_shift_imm(ctx
, OPC_SRL
, rx
, ry
, sa
);
13978 gen_shift_imm(ctx
, OPC_SRA
, rx
, ry
, sa
);
13982 #if defined(TARGET_MIPS64)
13984 check_insn(ctx
, ISA_MIPS3
);
13985 check_mips_64(ctx
);
13986 gen_ld(ctx
, OPC_LD
, ry
, rx
, offset
<< 3);
13991 int16_t imm
= (int8_t)((ctx
->opcode
& 0xf) << 4) >> 4;
13993 if ((ctx
->opcode
>> 4) & 1) {
13994 #if defined(TARGET_MIPS64)
13995 check_insn(ctx
, ISA_MIPS3
);
13996 check_mips_64(ctx
);
13997 gen_arith_imm(ctx
, OPC_DADDIU
, ry
, rx
, imm
);
13999 gen_reserved_instruction(ctx
);
14002 gen_arith_imm(ctx
, OPC_ADDIU
, ry
, rx
, imm
);
14006 case M16_OPC_ADDIU8
:
14008 int16_t imm
= (int8_t) ctx
->opcode
;
14010 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, rx
, imm
);
14015 int16_t imm
= (uint8_t) ctx
->opcode
;
14016 gen_slt_imm(ctx
, OPC_SLTI
, 24, rx
, imm
);
14019 case M16_OPC_SLTIU
:
14021 int16_t imm
= (uint8_t) ctx
->opcode
;
14022 gen_slt_imm(ctx
, OPC_SLTIU
, 24, rx
, imm
);
14029 funct
= (ctx
->opcode
>> 8) & 0x7;
14032 gen_compute_branch(ctx
, OPC_BEQ
, 2, 24, 0,
14033 ((int8_t)ctx
->opcode
) << 1, 0);
14036 gen_compute_branch(ctx
, OPC_BNE
, 2, 24, 0,
14037 ((int8_t)ctx
->opcode
) << 1, 0);
14040 gen_st(ctx
, OPC_SW
, 31, 29, (ctx
->opcode
& 0xff) << 2);
14043 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29,
14044 ((int8_t)ctx
->opcode
) << 3);
14047 check_insn(ctx
, ISA_MIPS_R1
);
14049 int do_ra
= ctx
->opcode
& (1 << 6);
14050 int do_s0
= ctx
->opcode
& (1 << 5);
14051 int do_s1
= ctx
->opcode
& (1 << 4);
14052 int framesize
= ctx
->opcode
& 0xf;
14054 if (framesize
== 0) {
14057 framesize
= framesize
<< 3;
14060 if (ctx
->opcode
& (1 << 7)) {
14061 gen_mips16_save(ctx
, 0, 0,
14062 do_ra
, do_s0
, do_s1
, framesize
);
14064 gen_mips16_restore(ctx
, 0, 0,
14065 do_ra
, do_s0
, do_s1
, framesize
);
14071 int rz
= xlat(ctx
->opcode
& 0x7);
14073 reg32
= (((ctx
->opcode
>> 3) & 0x3) << 3) |
14074 ((ctx
->opcode
>> 5) & 0x7);
14075 gen_arith(ctx
, OPC_ADDU
, reg32
, rz
, 0);
14079 reg32
= ctx
->opcode
& 0x1f;
14080 gen_arith(ctx
, OPC_ADDU
, ry
, reg32
, 0);
14083 gen_reserved_instruction(ctx
);
14090 int16_t imm
= (uint8_t) ctx
->opcode
;
14092 gen_arith_imm(ctx
, OPC_ADDIU
, rx
, 0, imm
);
14097 int16_t imm
= (uint8_t) ctx
->opcode
;
14098 gen_logic_imm(ctx
, OPC_XORI
, 24, rx
, imm
);
14101 #if defined(TARGET_MIPS64)
14103 check_insn(ctx
, ISA_MIPS3
);
14104 check_mips_64(ctx
);
14105 gen_st(ctx
, OPC_SD
, ry
, rx
, offset
<< 3);
14109 gen_ld(ctx
, OPC_LB
, ry
, rx
, offset
);
14112 gen_ld(ctx
, OPC_LH
, ry
, rx
, offset
<< 1);
14115 gen_ld(ctx
, OPC_LW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
14118 gen_ld(ctx
, OPC_LW
, ry
, rx
, offset
<< 2);
14121 gen_ld(ctx
, OPC_LBU
, ry
, rx
, offset
);
14124 gen_ld(ctx
, OPC_LHU
, ry
, rx
, offset
<< 1);
14127 gen_ld(ctx
, OPC_LWPC
, rx
, 0, ((uint8_t)ctx
->opcode
) << 2);
14129 #if defined(TARGET_MIPS64)
14131 check_insn(ctx
, ISA_MIPS3
);
14132 check_mips_64(ctx
);
14133 gen_ld(ctx
, OPC_LWU
, ry
, rx
, offset
<< 2);
14137 gen_st(ctx
, OPC_SB
, ry
, rx
, offset
);
14140 gen_st(ctx
, OPC_SH
, ry
, rx
, offset
<< 1);
14143 gen_st(ctx
, OPC_SW
, rx
, 29, ((uint8_t)ctx
->opcode
) << 2);
14146 gen_st(ctx
, OPC_SW
, ry
, rx
, offset
<< 2);
14150 int rz
= xlat((ctx
->opcode
>> 2) & 0x7);
14153 switch (ctx
->opcode
& 0x3) {
14155 mips32_op
= OPC_ADDU
;
14158 mips32_op
= OPC_SUBU
;
14160 #if defined(TARGET_MIPS64)
14162 mips32_op
= OPC_DADDU
;
14163 check_insn(ctx
, ISA_MIPS3
);
14164 check_mips_64(ctx
);
14167 mips32_op
= OPC_DSUBU
;
14168 check_insn(ctx
, ISA_MIPS3
);
14169 check_mips_64(ctx
);
14173 gen_reserved_instruction(ctx
);
14177 gen_arith(ctx
, mips32_op
, rz
, rx
, ry
);
14186 int nd
= (ctx
->opcode
>> 7) & 0x1;
14187 int link
= (ctx
->opcode
>> 6) & 0x1;
14188 int ra
= (ctx
->opcode
>> 5) & 0x1;
14191 check_insn(ctx
, ISA_MIPS_R1
);
14200 gen_compute_branch(ctx
, op
, 2, ra
? 31 : rx
, 31, 0,
14205 if (is_uhi(extract32(ctx
->opcode
, 5, 6))) {
14206 gen_helper_do_semihosting(cpu_env
);
14209 * XXX: not clear which exception should be raised
14210 * when in debug mode...
14212 check_insn(ctx
, ISA_MIPS_R1
);
14213 generate_exception_end(ctx
, EXCP_DBp
);
14217 gen_slt(ctx
, OPC_SLT
, 24, rx
, ry
);
14220 gen_slt(ctx
, OPC_SLTU
, 24, rx
, ry
);
14223 generate_exception_end(ctx
, EXCP_BREAK
);
14226 gen_shift(ctx
, OPC_SLLV
, ry
, rx
, ry
);
14229 gen_shift(ctx
, OPC_SRLV
, ry
, rx
, ry
);
14232 gen_shift(ctx
, OPC_SRAV
, ry
, rx
, ry
);
14234 #if defined(TARGET_MIPS64)
14236 check_insn(ctx
, ISA_MIPS3
);
14237 check_mips_64(ctx
);
14238 gen_shift_imm(ctx
, OPC_DSRL
, ry
, ry
, sa
);
14242 gen_logic(ctx
, OPC_XOR
, 24, rx
, ry
);
14245 gen_arith(ctx
, OPC_SUBU
, rx
, 0, ry
);
14248 gen_logic(ctx
, OPC_AND
, rx
, rx
, ry
);
14251 gen_logic(ctx
, OPC_OR
, rx
, rx
, ry
);
14254 gen_logic(ctx
, OPC_XOR
, rx
, rx
, ry
);
14257 gen_logic(ctx
, OPC_NOR
, rx
, ry
, 0);
14260 gen_HILO(ctx
, OPC_MFHI
, 0, rx
);
14263 check_insn(ctx
, ISA_MIPS_R1
);
14265 case RR_RY_CNVT_ZEB
:
14266 tcg_gen_ext8u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14268 case RR_RY_CNVT_ZEH
:
14269 tcg_gen_ext16u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14271 case RR_RY_CNVT_SEB
:
14272 tcg_gen_ext8s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14274 case RR_RY_CNVT_SEH
:
14275 tcg_gen_ext16s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14277 #if defined(TARGET_MIPS64)
14278 case RR_RY_CNVT_ZEW
:
14279 check_insn(ctx
, ISA_MIPS_R1
);
14280 check_mips_64(ctx
);
14281 tcg_gen_ext32u_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14283 case RR_RY_CNVT_SEW
:
14284 check_insn(ctx
, ISA_MIPS_R1
);
14285 check_mips_64(ctx
);
14286 tcg_gen_ext32s_tl(cpu_gpr
[rx
], cpu_gpr
[rx
]);
14290 gen_reserved_instruction(ctx
);
14295 gen_HILO(ctx
, OPC_MFLO
, 0, rx
);
14297 #if defined(TARGET_MIPS64)
14299 check_insn(ctx
, ISA_MIPS3
);
14300 check_mips_64(ctx
);
14301 gen_shift_imm(ctx
, OPC_DSRA
, ry
, ry
, sa
);
14304 check_insn(ctx
, ISA_MIPS3
);
14305 check_mips_64(ctx
);
14306 gen_shift(ctx
, OPC_DSLLV
, ry
, rx
, ry
);
14309 check_insn(ctx
, ISA_MIPS3
);
14310 check_mips_64(ctx
);
14311 gen_shift(ctx
, OPC_DSRLV
, ry
, rx
, ry
);
14314 check_insn(ctx
, ISA_MIPS3
);
14315 check_mips_64(ctx
);
14316 gen_shift(ctx
, OPC_DSRAV
, ry
, rx
, ry
);
14320 gen_muldiv(ctx
, OPC_MULT
, 0, rx
, ry
);
14323 gen_muldiv(ctx
, OPC_MULTU
, 0, rx
, ry
);
14326 gen_muldiv(ctx
, OPC_DIV
, 0, rx
, ry
);
14329 gen_muldiv(ctx
, OPC_DIVU
, 0, rx
, ry
);
14331 #if defined(TARGET_MIPS64)
14333 check_insn(ctx
, ISA_MIPS3
);
14334 check_mips_64(ctx
);
14335 gen_muldiv(ctx
, OPC_DMULT
, 0, rx
, ry
);
14338 check_insn(ctx
, ISA_MIPS3
);
14339 check_mips_64(ctx
);
14340 gen_muldiv(ctx
, OPC_DMULTU
, 0, rx
, ry
);
14343 check_insn(ctx
, ISA_MIPS3
);
14344 check_mips_64(ctx
);
14345 gen_muldiv(ctx
, OPC_DDIV
, 0, rx
, ry
);
14348 check_insn(ctx
, ISA_MIPS3
);
14349 check_mips_64(ctx
);
14350 gen_muldiv(ctx
, OPC_DDIVU
, 0, rx
, ry
);
14354 gen_reserved_instruction(ctx
);
14358 case M16_OPC_EXTEND
:
14359 decode_extended_mips16_opc(env
, ctx
);
14362 #if defined(TARGET_MIPS64)
14364 funct
= (ctx
->opcode
>> 8) & 0x7;
14365 decode_i64_mips16(ctx
, ry
, funct
, offset
, 0);
14369 gen_reserved_instruction(ctx
);
14376 /* microMIPS extension to MIPS32/MIPS64 */
14379 * microMIPS32/microMIPS64 major opcodes
14381 * 1. MIPS Architecture for Programmers Volume II-B:
14382 * The microMIPS32 Instruction Set (Revision 3.05)
14384 * Table 6.2 microMIPS32 Encoding of Major Opcode Field
14386 * 2. MIPS Architecture For Programmers Volume II-A:
14387 * The MIPS64 Instruction Set (Revision 3.51)
14417 POOL32S
= 0x16, /* MIPS64 */
14418 DADDIU32
= 0x17, /* MIPS64 */
14447 /* 0x29 is reserved */
14460 /* 0x31 is reserved */
14473 SD32
= 0x36, /* MIPS64 */
14474 LD32
= 0x37, /* MIPS64 */
14476 /* 0x39 is reserved */
14492 /* PCREL Instructions perform PC-Relative address calculation. bits 20..16 */
14514 /* POOL32A encoding of minor opcode field */
14518 * These opcodes are distinguished only by bits 9..6; those bits are
14519 * what are recorded below.
14557 /* The following can be distinguished by their lower 6 bits. */
14567 /* POOL32AXF encoding of minor opcode field extension */
14570 * 1. MIPS Architecture for Programmers Volume II-B:
14571 * The microMIPS32 Instruction Set (Revision 3.05)
14573 * Table 6.5 POOL32Axf Encoding of Minor Opcode Extension Field
14575 * 2. MIPS Architecture for Programmers VolumeIV-e:
14576 * The MIPS DSP Application-Specific Extension
14577 * to the microMIPS32 Architecture (Revision 2.34)
14579 * Table 5.5 POOL32Axf Encoding of Minor Opcode Extension Field
14594 /* begin of microMIPS32 DSP */
14596 /* bits 13..12 for 0x01 */
14602 /* bits 13..12 for 0x2a */
14608 /* bits 13..12 for 0x32 */
14612 /* end of microMIPS32 DSP */
14614 /* bits 15..12 for 0x2c */
14631 /* bits 15..12 for 0x34 */
14639 /* bits 15..12 for 0x3c */
14641 JR
= 0x0, /* alias */
14649 /* bits 15..12 for 0x05 */
14653 /* bits 15..12 for 0x0d */
14665 /* bits 15..12 for 0x15 */
14671 /* bits 15..12 for 0x1d */
14675 /* bits 15..12 for 0x2d */
14680 /* bits 15..12 for 0x35 */
14687 /* POOL32B encoding of minor opcode field (bits 15..12) */
14703 /* POOL32C encoding of minor opcode field (bits 15..12) */
14724 /* POOL32C LD-EVA encoding of minor opcode field (bits 11..9) */
14737 /* POOL32C ST-EVA encoding of minor opcode field (bits 11..9) */
14750 /* POOL32F encoding of minor opcode field (bits 5..0) */
14753 /* These are the bit 7..6 values */
14762 /* These are the bit 8..6 values */
14787 MOVZ_FMT_05
= 0x05,
14821 CABS_COND_FMT
= 0x1c, /* MIPS3D */
14828 /* POOL32Fxf encoding of minor opcode extension field */
14866 /* POOL32I encoding of minor opcode field (bits 25..21) */
14896 /* These overlap and are distinguished by bit16 of the instruction */
14905 /* POOL16A encoding of minor opcode field */
14912 /* POOL16B encoding of minor opcode field */
14919 /* POOL16C encoding of minor opcode field */
14939 /* R6 POOL16C encoding of minor opcode field (bits 0..5) */
14963 /* POOL16D encoding of minor opcode field */
14970 /* POOL16E encoding of minor opcode field */
/*
 * Map a 3-bit microMIPS register field onto the full GPR number.
 * microMIPS register 0..7 -> GPRs 16, 17, 2..7.
 */
static int mmreg(int r)
{
    static const int map[] = { 16, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
14984 /* Used for 16-bit store instructions. */
/*
 * Same mapping as mmreg(), but field value 0 selects GPR 0 ($zero)
 * rather than GPR 16 — used by 16-bit store encodings.
 */
static int mmreg2(int r)
{
    static const int map[] = { 0, 17, 2, 3, 4, 5, 6, 7 };

    return map[r];
}
14992 #define uMIPS_RD(op) ((op >> 7) & 0x7)
14993 #define uMIPS_RS(op) ((op >> 4) & 0x7)
14994 #define uMIPS_RS2(op) uMIPS_RS(op)
14995 #define uMIPS_RS1(op) ((op >> 1) & 0x7)
14996 #define uMIPS_RD5(op) ((op >> 5) & 0x1f)
14997 #define uMIPS_RS5(op) (op & 0x1f)
14999 /* Signed immediate */
15000 #define SIMM(op, start, width) \
15001 ((int32_t)(((op >> start) & ((~0U) >> (32 - width))) \
15004 /* Zero-extended immediate */
15005 #define ZIMM(op, start, width) ((op >> start) & ((~0U) >> (32 - width)))
15007 static void gen_addiur1sp(DisasContext
*ctx
)
15009 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15011 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, 29, ((ctx
->opcode
>> 1) & 0x3f) << 2);
15014 static void gen_addiur2(DisasContext
*ctx
)
15016 static const int decoded_imm
[] = { 1, 4, 8, 12, 16, 20, 24, -1 };
15017 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15018 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
15020 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rs
, decoded_imm
[ZIMM(ctx
->opcode
, 1, 3)]);
15023 static void gen_addiusp(DisasContext
*ctx
)
15025 int encoded
= ZIMM(ctx
->opcode
, 1, 9);
15028 if (encoded
<= 1) {
15029 decoded
= 256 + encoded
;
15030 } else if (encoded
<= 255) {
15032 } else if (encoded
<= 509) {
15033 decoded
= encoded
- 512;
15035 decoded
= encoded
- 768;
15038 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, decoded
<< 2);
15041 static void gen_addius5(DisasContext
*ctx
)
15043 int imm
= SIMM(ctx
->opcode
, 1, 4);
15044 int rd
= (ctx
->opcode
>> 5) & 0x1f;
15046 gen_arith_imm(ctx
, OPC_ADDIU
, rd
, rd
, imm
);
15049 static void gen_andi16(DisasContext
*ctx
)
15051 static const int decoded_imm
[] = { 128, 1, 2, 3, 4, 7, 8, 15, 16,
15052 31, 32, 63, 64, 255, 32768, 65535 };
15053 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
15054 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
15055 int encoded
= ZIMM(ctx
->opcode
, 0, 4);
15057 gen_logic_imm(ctx
, OPC_ANDI
, rd
, rs
, decoded_imm
[encoded
]);
15060 static void gen_ldst_multiple(DisasContext
*ctx
, uint32_t opc
, int reglist
,
15061 int base
, int16_t offset
)
15066 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
15067 gen_reserved_instruction(ctx
);
15071 t0
= tcg_temp_new();
15073 gen_base_offset_addr(ctx
, t0
, base
, offset
);
15075 t1
= tcg_const_tl(reglist
);
15076 t2
= tcg_const_i32(ctx
->mem_idx
);
15078 save_cpu_state(ctx
, 1);
15081 gen_helper_lwm(cpu_env
, t0
, t1
, t2
);
15084 gen_helper_swm(cpu_env
, t0
, t1
, t2
);
15086 #ifdef TARGET_MIPS64
15088 gen_helper_ldm(cpu_env
, t0
, t1
, t2
);
15091 gen_helper_sdm(cpu_env
, t0
, t1
, t2
);
15097 tcg_temp_free_i32(t2
);
15101 static void gen_pool16c_insn(DisasContext
*ctx
)
15103 int rd
= mmreg((ctx
->opcode
>> 3) & 0x7);
15104 int rs
= mmreg(ctx
->opcode
& 0x7);
15106 switch (((ctx
->opcode
) >> 4) & 0x3f) {
15111 gen_logic(ctx
, OPC_NOR
, rd
, rs
, 0);
15117 gen_logic(ctx
, OPC_XOR
, rd
, rd
, rs
);
15123 gen_logic(ctx
, OPC_AND
, rd
, rd
, rs
);
15129 gen_logic(ctx
, OPC_OR
, rd
, rd
, rs
);
15136 static const int lwm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
15137 int offset
= ZIMM(ctx
->opcode
, 0, 4);
15139 gen_ldst_multiple(ctx
, LWM32
, lwm_convert
[(ctx
->opcode
>> 4) & 0x3],
15148 static const int swm_convert
[] = { 0x11, 0x12, 0x13, 0x14 };
15149 int offset
= ZIMM(ctx
->opcode
, 0, 4);
15151 gen_ldst_multiple(ctx
, SWM32
, swm_convert
[(ctx
->opcode
>> 4) & 0x3],
15158 int reg
= ctx
->opcode
& 0x1f;
15160 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 4);
15166 int reg
= ctx
->opcode
& 0x1f;
15167 gen_compute_branch(ctx
, OPC_JR
, 2, reg
, 0, 0, 0);
15169 * Let normal delay slot handling in our caller take us
15170 * to the branch target.
15176 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 4);
15177 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15181 gen_compute_branch(ctx
, OPC_JALR
, 2, ctx
->opcode
& 0x1f, 31, 0, 2);
15182 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15186 gen_HILO(ctx
, OPC_MFHI
, 0, uMIPS_RS5(ctx
->opcode
));
15190 gen_HILO(ctx
, OPC_MFLO
, 0, uMIPS_RS5(ctx
->opcode
));
15193 generate_exception_end(ctx
, EXCP_BREAK
);
15196 if (is_uhi(extract32(ctx
->opcode
, 0, 4))) {
15197 gen_helper_do_semihosting(cpu_env
);
15200 * XXX: not clear which exception should be raised
15201 * when in debug mode...
15203 check_insn(ctx
, ISA_MIPS_R1
);
15204 generate_exception_end(ctx
, EXCP_DBp
);
15207 case JRADDIUSP
+ 0:
15208 case JRADDIUSP
+ 1:
15210 int imm
= ZIMM(ctx
->opcode
, 0, 5);
15211 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
15212 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
15214 * Let normal delay slot handling in our caller take us
15215 * to the branch target.
15220 gen_reserved_instruction(ctx
);
15225 static inline void gen_movep(DisasContext
*ctx
, int enc_dest
, int enc_rt
,
15228 int rd
, rs
, re
, rt
;
15229 static const int rd_enc
[] = { 5, 5, 6, 4, 4, 4, 4, 4 };
15230 static const int re_enc
[] = { 6, 7, 7, 21, 22, 5, 6, 7 };
15231 static const int rs_rt_enc
[] = { 0, 17, 2, 3, 16, 18, 19, 20 };
15232 rd
= rd_enc
[enc_dest
];
15233 re
= re_enc
[enc_dest
];
15234 rs
= rs_rt_enc
[enc_rs
];
15235 rt
= rs_rt_enc
[enc_rt
];
15237 tcg_gen_mov_tl(cpu_gpr
[rd
], cpu_gpr
[rs
]);
15239 tcg_gen_movi_tl(cpu_gpr
[rd
], 0);
15242 tcg_gen_mov_tl(cpu_gpr
[re
], cpu_gpr
[rt
]);
15244 tcg_gen_movi_tl(cpu_gpr
[re
], 0);
15248 static void gen_pool16c_r6_insn(DisasContext
*ctx
)
15250 int rt
= mmreg((ctx
->opcode
>> 7) & 0x7);
15251 int rs
= mmreg((ctx
->opcode
>> 4) & 0x7);
15253 switch (ctx
->opcode
& 0xf) {
15255 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
15258 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
15262 int lwm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
15263 int offset
= extract32(ctx
->opcode
, 4, 4);
15264 gen_ldst_multiple(ctx
, LWM32
, lwm_converted
, 29, offset
<< 2);
15267 case R6_JRC16
: /* JRCADDIUSP */
15268 if ((ctx
->opcode
>> 4) & 1) {
15270 int imm
= extract32(ctx
->opcode
, 5, 5);
15271 gen_compute_branch(ctx
, OPC_JR
, 2, 31, 0, 0, 0);
15272 gen_arith_imm(ctx
, OPC_ADDIU
, 29, 29, imm
<< 2);
15275 rs
= extract32(ctx
->opcode
, 5, 5);
15276 gen_compute_branch(ctx
, OPC_JR
, 2, rs
, 0, 0, 0);
15288 int enc_dest
= uMIPS_RD(ctx
->opcode
);
15289 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
15290 int enc_rs
= (ctx
->opcode
& 3) | ((ctx
->opcode
>> 1) & 4);
15291 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
15295 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
15298 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
15302 int swm_converted
= 0x11 + extract32(ctx
->opcode
, 8, 2);
15303 int offset
= extract32(ctx
->opcode
, 4, 4);
15304 gen_ldst_multiple(ctx
, SWM32
, swm_converted
, 29, offset
<< 2);
15307 case JALRC16
: /* BREAK16, SDBBP16 */
15308 switch (ctx
->opcode
& 0x3f) {
15310 case JALRC16
+ 0x20:
15312 gen_compute_branch(ctx
, OPC_JALR
, 2, (ctx
->opcode
>> 5) & 0x1f,
15317 generate_exception(ctx
, EXCP_BREAK
);
15321 if (is_uhi(extract32(ctx
->opcode
, 6, 4))) {
15322 gen_helper_do_semihosting(cpu_env
);
15324 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
15325 generate_exception(ctx
, EXCP_RI
);
15327 generate_exception(ctx
, EXCP_DBp
);
15334 generate_exception(ctx
, EXCP_RI
);
15339 static void gen_ldxs(DisasContext
*ctx
, int base
, int index
, int rd
)
15341 TCGv t0
= tcg_temp_new();
15342 TCGv t1
= tcg_temp_new();
15344 gen_load_gpr(t0
, base
);
15347 gen_load_gpr(t1
, index
);
15348 tcg_gen_shli_tl(t1
, t1
, 2);
15349 gen_op_addr_add(ctx
, t0
, t1
, t0
);
15352 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15353 gen_store_gpr(t1
, rd
);
15359 static void gen_ldst_pair(DisasContext
*ctx
, uint32_t opc
, int rd
,
15360 int base
, int16_t offset
)
15364 if (ctx
->hflags
& MIPS_HFLAG_BMASK
|| rd
== 31) {
15365 gen_reserved_instruction(ctx
);
15369 t0
= tcg_temp_new();
15370 t1
= tcg_temp_new();
15372 gen_base_offset_addr(ctx
, t0
, base
, offset
);
15377 gen_reserved_instruction(ctx
);
15380 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15381 gen_store_gpr(t1
, rd
);
15382 tcg_gen_movi_tl(t1
, 4);
15383 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15384 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TESL
);
15385 gen_store_gpr(t1
, rd
+ 1);
15388 gen_load_gpr(t1
, rd
);
15389 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
15390 tcg_gen_movi_tl(t1
, 4);
15391 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15392 gen_load_gpr(t1
, rd
+ 1);
15393 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
15395 #ifdef TARGET_MIPS64
15398 gen_reserved_instruction(ctx
);
15401 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15402 gen_store_gpr(t1
, rd
);
15403 tcg_gen_movi_tl(t1
, 8);
15404 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15405 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15406 gen_store_gpr(t1
, rd
+ 1);
15409 gen_load_gpr(t1
, rd
);
15410 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15411 tcg_gen_movi_tl(t1
, 8);
15412 gen_op_addr_add(ctx
, t0
, t0
, t1
);
15413 gen_load_gpr(t1
, rd
+ 1);
15414 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEQ
);
15422 static void gen_sync(int stype
)
15424 TCGBar tcg_mo
= TCG_BAR_SC
;
15427 case 0x4: /* SYNC_WMB */
15428 tcg_mo
|= TCG_MO_ST_ST
;
15430 case 0x10: /* SYNC_MB */
15431 tcg_mo
|= TCG_MO_ALL
;
15433 case 0x11: /* SYNC_ACQUIRE */
15434 tcg_mo
|= TCG_MO_LD_LD
| TCG_MO_LD_ST
;
15436 case 0x12: /* SYNC_RELEASE */
15437 tcg_mo
|= TCG_MO_ST_ST
| TCG_MO_LD_ST
;
15439 case 0x13: /* SYNC_RMB */
15440 tcg_mo
|= TCG_MO_LD_LD
;
15443 tcg_mo
|= TCG_MO_ALL
;
15447 tcg_gen_mb(tcg_mo
);
15450 static void gen_pool32axf(CPUMIPSState
*env
, DisasContext
*ctx
, int rt
, int rs
)
15452 int extension
= (ctx
->opcode
>> 6) & 0x3f;
15453 int minor
= (ctx
->opcode
>> 12) & 0xf;
15454 uint32_t mips32_op
;
15456 switch (extension
) {
15458 mips32_op
= OPC_TEQ
;
15461 mips32_op
= OPC_TGE
;
15464 mips32_op
= OPC_TGEU
;
15467 mips32_op
= OPC_TLT
;
15470 mips32_op
= OPC_TLTU
;
15473 mips32_op
= OPC_TNE
;
15475 gen_trap(ctx
, mips32_op
, rs
, rt
, -1);
15477 #ifndef CONFIG_USER_ONLY
15480 check_cp0_enabled(ctx
);
15482 /* Treat as NOP. */
15485 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, (ctx
->opcode
>> 11) & 0x7);
15489 check_cp0_enabled(ctx
);
15491 TCGv t0
= tcg_temp_new();
15493 gen_load_gpr(t0
, rt
);
15494 gen_mtc0(ctx
, t0
, rs
, (ctx
->opcode
>> 11) & 0x7);
15500 switch (minor
& 3) {
15502 gen_muldiv(ctx
, OPC_MADD
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15505 gen_muldiv(ctx
, OPC_MADDU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15508 gen_muldiv(ctx
, OPC_MSUB
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15511 gen_muldiv(ctx
, OPC_MSUBU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15514 goto pool32axf_invalid
;
15518 switch (minor
& 3) {
15520 gen_muldiv(ctx
, OPC_MULT
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15523 gen_muldiv(ctx
, OPC_MULTU
, (ctx
->opcode
>> 14) & 3, rs
, rt
);
15526 goto pool32axf_invalid
;
15532 check_insn(ctx
, ISA_MIPS_R6
);
15533 gen_bitswap(ctx
, OPC_BITSWAP
, rs
, rt
);
15536 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
15539 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
15542 mips32_op
= OPC_CLO
;
15545 mips32_op
= OPC_CLZ
;
15547 check_insn(ctx
, ISA_MIPS_R1
);
15548 gen_cl(ctx
, mips32_op
, rt
, rs
);
15551 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15552 gen_rdhwr(ctx
, rt
, rs
, 0);
15555 gen_bshfl(ctx
, OPC_WSBH
, rs
, rt
);
15558 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15559 mips32_op
= OPC_MULT
;
15562 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15563 mips32_op
= OPC_MULTU
;
15566 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15567 mips32_op
= OPC_DIV
;
15570 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15571 mips32_op
= OPC_DIVU
;
15574 check_insn(ctx
, ISA_MIPS_R1
);
15575 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15578 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15579 mips32_op
= OPC_MADD
;
15582 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15583 mips32_op
= OPC_MADDU
;
15586 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15587 mips32_op
= OPC_MSUB
;
15590 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15591 mips32_op
= OPC_MSUBU
;
15593 check_insn(ctx
, ISA_MIPS_R1
);
15594 gen_muldiv(ctx
, mips32_op
, 0, rs
, rt
);
15597 goto pool32axf_invalid
;
15608 generate_exception_err(ctx
, EXCP_CpU
, 2);
15611 goto pool32axf_invalid
;
15616 case JALR
: /* JALRC */
15617 case JALR_HB
: /* JALRC_HB */
15618 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
15619 /* JALRC, JALRC_HB */
15620 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 0);
15622 /* JALR, JALR_HB */
15623 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 4);
15624 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15629 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15630 gen_compute_branch(ctx
, OPC_JALR
, 4, rs
, rt
, 0, 2);
15631 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
15634 goto pool32axf_invalid
;
15640 check_cp0_enabled(ctx
);
15641 check_insn(ctx
, ISA_MIPS_R2
);
15642 gen_load_srsgpr(rs
, rt
);
15645 check_cp0_enabled(ctx
);
15646 check_insn(ctx
, ISA_MIPS_R2
);
15647 gen_store_srsgpr(rs
, rt
);
15650 goto pool32axf_invalid
;
15653 #ifndef CONFIG_USER_ONLY
15657 mips32_op
= OPC_TLBP
;
15660 mips32_op
= OPC_TLBR
;
15663 mips32_op
= OPC_TLBWI
;
15666 mips32_op
= OPC_TLBWR
;
15669 mips32_op
= OPC_TLBINV
;
15672 mips32_op
= OPC_TLBINVF
;
15675 mips32_op
= OPC_WAIT
;
15678 mips32_op
= OPC_DERET
;
15681 mips32_op
= OPC_ERET
;
15683 gen_cp0(env
, ctx
, mips32_op
, rt
, rs
);
15686 goto pool32axf_invalid
;
15692 check_cp0_enabled(ctx
);
15694 TCGv t0
= tcg_temp_new();
15696 save_cpu_state(ctx
, 1);
15697 gen_helper_di(t0
, cpu_env
);
15698 gen_store_gpr(t0
, rs
);
15700 * Stop translation as we may have switched the execution
15703 ctx
->base
.is_jmp
= DISAS_STOP
;
15708 check_cp0_enabled(ctx
);
15710 TCGv t0
= tcg_temp_new();
15712 save_cpu_state(ctx
, 1);
15713 gen_helper_ei(t0
, cpu_env
);
15714 gen_store_gpr(t0
, rs
);
15716 * DISAS_STOP isn't sufficient, we need to ensure we break out
15717 * of translated code to check for pending interrupts.
15719 gen_save_pc(ctx
->base
.pc_next
+ 4);
15720 ctx
->base
.is_jmp
= DISAS_EXIT
;
15725 goto pool32axf_invalid
;
15732 gen_sync(extract32(ctx
->opcode
, 16, 5));
15735 generate_exception_end(ctx
, EXCP_SYSCALL
);
15738 if (is_uhi(extract32(ctx
->opcode
, 16, 10))) {
15739 gen_helper_do_semihosting(cpu_env
);
15741 check_insn(ctx
, ISA_MIPS_R1
);
15742 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
15743 gen_reserved_instruction(ctx
);
15745 generate_exception_end(ctx
, EXCP_DBp
);
15750 goto pool32axf_invalid
;
15754 switch (minor
& 3) {
15756 gen_HILO(ctx
, OPC_MFHI
, minor
>> 2, rs
);
15759 gen_HILO(ctx
, OPC_MFLO
, minor
>> 2, rs
);
15762 gen_HILO(ctx
, OPC_MTHI
, minor
>> 2, rs
);
15765 gen_HILO(ctx
, OPC_MTLO
, minor
>> 2, rs
);
15768 goto pool32axf_invalid
;
15772 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
15775 gen_HILO(ctx
, OPC_MFHI
, 0, rs
);
15778 gen_HILO(ctx
, OPC_MFLO
, 0, rs
);
15781 gen_HILO(ctx
, OPC_MTHI
, 0, rs
);
15784 gen_HILO(ctx
, OPC_MTLO
, 0, rs
);
15787 goto pool32axf_invalid
;
15792 MIPS_INVAL("pool32axf");
15793 gen_reserved_instruction(ctx
);
15799 * Values for microMIPS fmt field. Variable-width, depending on which
15800 * formats the instruction supports.
15819 static void gen_pool32fxf(DisasContext
*ctx
, int rt
, int rs
)
15821 int extension
= (ctx
->opcode
>> 6) & 0x3ff;
15822 uint32_t mips32_op
;
15824 #define FLOAT_1BIT_FMT(opc, fmt) ((fmt << 8) | opc)
15825 #define FLOAT_2BIT_FMT(opc, fmt) ((fmt << 7) | opc)
15826 #define COND_FLOAT_MOV(opc, cond) ((cond << 7) | opc)
15828 switch (extension
) {
15829 case FLOAT_1BIT_FMT(CFC1
, 0):
15830 mips32_op
= OPC_CFC1
;
15832 case FLOAT_1BIT_FMT(CTC1
, 0):
15833 mips32_op
= OPC_CTC1
;
15835 case FLOAT_1BIT_FMT(MFC1
, 0):
15836 mips32_op
= OPC_MFC1
;
15838 case FLOAT_1BIT_FMT(MTC1
, 0):
15839 mips32_op
= OPC_MTC1
;
15841 case FLOAT_1BIT_FMT(MFHC1
, 0):
15842 mips32_op
= OPC_MFHC1
;
15844 case FLOAT_1BIT_FMT(MTHC1
, 0):
15845 mips32_op
= OPC_MTHC1
;
15847 gen_cp1(ctx
, mips32_op
, rt
, rs
);
15850 /* Reciprocal square root */
15851 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_S
):
15852 mips32_op
= OPC_RSQRT_S
;
15854 case FLOAT_1BIT_FMT(RSQRT_FMT
, FMT_SD_D
):
15855 mips32_op
= OPC_RSQRT_D
;
15859 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_S
):
15860 mips32_op
= OPC_SQRT_S
;
15862 case FLOAT_1BIT_FMT(SQRT_FMT
, FMT_SD_D
):
15863 mips32_op
= OPC_SQRT_D
;
15867 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_S
):
15868 mips32_op
= OPC_RECIP_S
;
15870 case FLOAT_1BIT_FMT(RECIP_FMT
, FMT_SD_D
):
15871 mips32_op
= OPC_RECIP_D
;
15875 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_S
):
15876 mips32_op
= OPC_FLOOR_L_S
;
15878 case FLOAT_1BIT_FMT(FLOOR_L
, FMT_SD_D
):
15879 mips32_op
= OPC_FLOOR_L_D
;
15881 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_S
):
15882 mips32_op
= OPC_FLOOR_W_S
;
15884 case FLOAT_1BIT_FMT(FLOOR_W
, FMT_SD_D
):
15885 mips32_op
= OPC_FLOOR_W_D
;
15889 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_S
):
15890 mips32_op
= OPC_CEIL_L_S
;
15892 case FLOAT_1BIT_FMT(CEIL_L
, FMT_SD_D
):
15893 mips32_op
= OPC_CEIL_L_D
;
15895 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_S
):
15896 mips32_op
= OPC_CEIL_W_S
;
15898 case FLOAT_1BIT_FMT(CEIL_W
, FMT_SD_D
):
15899 mips32_op
= OPC_CEIL_W_D
;
15903 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_S
):
15904 mips32_op
= OPC_TRUNC_L_S
;
15906 case FLOAT_1BIT_FMT(TRUNC_L
, FMT_SD_D
):
15907 mips32_op
= OPC_TRUNC_L_D
;
15909 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_S
):
15910 mips32_op
= OPC_TRUNC_W_S
;
15912 case FLOAT_1BIT_FMT(TRUNC_W
, FMT_SD_D
):
15913 mips32_op
= OPC_TRUNC_W_D
;
15917 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_S
):
15918 mips32_op
= OPC_ROUND_L_S
;
15920 case FLOAT_1BIT_FMT(ROUND_L
, FMT_SD_D
):
15921 mips32_op
= OPC_ROUND_L_D
;
15923 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_S
):
15924 mips32_op
= OPC_ROUND_W_S
;
15926 case FLOAT_1BIT_FMT(ROUND_W
, FMT_SD_D
):
15927 mips32_op
= OPC_ROUND_W_D
;
15930 /* Integer to floating-point conversion */
15931 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_S
):
15932 mips32_op
= OPC_CVT_L_S
;
15934 case FLOAT_1BIT_FMT(CVT_L
, FMT_SD_D
):
15935 mips32_op
= OPC_CVT_L_D
;
15937 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_S
):
15938 mips32_op
= OPC_CVT_W_S
;
15940 case FLOAT_1BIT_FMT(CVT_W
, FMT_SD_D
):
15941 mips32_op
= OPC_CVT_W_D
;
15944 /* Paired-foo conversions */
15945 case FLOAT_1BIT_FMT(CVT_S_PL
, 0):
15946 mips32_op
= OPC_CVT_S_PL
;
15948 case FLOAT_1BIT_FMT(CVT_S_PU
, 0):
15949 mips32_op
= OPC_CVT_S_PU
;
15951 case FLOAT_1BIT_FMT(CVT_PW_PS
, 0):
15952 mips32_op
= OPC_CVT_PW_PS
;
15954 case FLOAT_1BIT_FMT(CVT_PS_PW
, 0):
15955 mips32_op
= OPC_CVT_PS_PW
;
15958 /* Floating-point moves */
15959 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_S
):
15960 mips32_op
= OPC_MOV_S
;
15962 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_D
):
15963 mips32_op
= OPC_MOV_D
;
15965 case FLOAT_2BIT_FMT(MOV_FMT
, FMT_SDPS_PS
):
15966 mips32_op
= OPC_MOV_PS
;
15969 /* Absolute value */
15970 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_S
):
15971 mips32_op
= OPC_ABS_S
;
15973 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_D
):
15974 mips32_op
= OPC_ABS_D
;
15976 case FLOAT_2BIT_FMT(ABS_FMT
, FMT_SDPS_PS
):
15977 mips32_op
= OPC_ABS_PS
;
15981 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_S
):
15982 mips32_op
= OPC_NEG_S
;
15984 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_D
):
15985 mips32_op
= OPC_NEG_D
;
15987 case FLOAT_2BIT_FMT(NEG_FMT
, FMT_SDPS_PS
):
15988 mips32_op
= OPC_NEG_PS
;
15991 /* Reciprocal square root step */
15992 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_S
):
15993 mips32_op
= OPC_RSQRT1_S
;
15995 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_D
):
15996 mips32_op
= OPC_RSQRT1_D
;
15998 case FLOAT_2BIT_FMT(RSQRT1_FMT
, FMT_SDPS_PS
):
15999 mips32_op
= OPC_RSQRT1_PS
;
16002 /* Reciprocal step */
16003 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_S
):
16004 mips32_op
= OPC_RECIP1_S
;
16006 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_D
):
16007 mips32_op
= OPC_RECIP1_S
;
16009 case FLOAT_2BIT_FMT(RECIP1_FMT
, FMT_SDPS_PS
):
16010 mips32_op
= OPC_RECIP1_PS
;
16013 /* Conversions from double */
16014 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_S
):
16015 mips32_op
= OPC_CVT_D_S
;
16017 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_W
):
16018 mips32_op
= OPC_CVT_D_W
;
16020 case FLOAT_2BIT_FMT(CVT_D
, FMT_SWL_L
):
16021 mips32_op
= OPC_CVT_D_L
;
16024 /* Conversions from single */
16025 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_D
):
16026 mips32_op
= OPC_CVT_S_D
;
16028 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_W
):
16029 mips32_op
= OPC_CVT_S_W
;
16031 case FLOAT_2BIT_FMT(CVT_S
, FMT_DWL_L
):
16032 mips32_op
= OPC_CVT_S_L
;
16034 gen_farith(ctx
, mips32_op
, -1, rs
, rt
, 0);
16037 /* Conditional moves on floating-point codes */
16038 case COND_FLOAT_MOV(MOVT
, 0):
16039 case COND_FLOAT_MOV(MOVT
, 1):
16040 case COND_FLOAT_MOV(MOVT
, 2):
16041 case COND_FLOAT_MOV(MOVT
, 3):
16042 case COND_FLOAT_MOV(MOVT
, 4):
16043 case COND_FLOAT_MOV(MOVT
, 5):
16044 case COND_FLOAT_MOV(MOVT
, 6):
16045 case COND_FLOAT_MOV(MOVT
, 7):
16046 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16047 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 1);
16049 case COND_FLOAT_MOV(MOVF
, 0):
16050 case COND_FLOAT_MOV(MOVF
, 1):
16051 case COND_FLOAT_MOV(MOVF
, 2):
16052 case COND_FLOAT_MOV(MOVF
, 3):
16053 case COND_FLOAT_MOV(MOVF
, 4):
16054 case COND_FLOAT_MOV(MOVF
, 5):
16055 case COND_FLOAT_MOV(MOVF
, 6):
16056 case COND_FLOAT_MOV(MOVF
, 7):
16057 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16058 gen_movci(ctx
, rt
, rs
, (ctx
->opcode
>> 13) & 0x7, 0);
16061 MIPS_INVAL("pool32fxf");
16062 gen_reserved_instruction(ctx
);
16067 static void decode_micromips32_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
16071 int rt
, rs
, rd
, rr
;
16073 uint32_t op
, minor
, minor2
, mips32_op
;
16074 uint32_t cond
, fmt
, cc
;
16076 insn
= translator_lduw(env
, ctx
->base
.pc_next
+ 2);
16077 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
16079 rt
= (ctx
->opcode
>> 21) & 0x1f;
16080 rs
= (ctx
->opcode
>> 16) & 0x1f;
16081 rd
= (ctx
->opcode
>> 11) & 0x1f;
16082 rr
= (ctx
->opcode
>> 6) & 0x1f;
16083 imm
= (int16_t) ctx
->opcode
;
16085 op
= (ctx
->opcode
>> 26) & 0x3f;
16088 minor
= ctx
->opcode
& 0x3f;
16091 minor
= (ctx
->opcode
>> 6) & 0xf;
16094 mips32_op
= OPC_SLL
;
16097 mips32_op
= OPC_SRA
;
16100 mips32_op
= OPC_SRL
;
16103 mips32_op
= OPC_ROTR
;
16105 gen_shift_imm(ctx
, mips32_op
, rt
, rs
, rd
);
16108 check_insn(ctx
, ISA_MIPS_R6
);
16109 gen_cond_move(ctx
, OPC_SELEQZ
, rd
, rs
, rt
);
16112 check_insn(ctx
, ISA_MIPS_R6
);
16113 gen_cond_move(ctx
, OPC_SELNEZ
, rd
, rs
, rt
);
16116 check_insn(ctx
, ISA_MIPS_R6
);
16117 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
16120 goto pool32a_invalid
;
16124 minor
= (ctx
->opcode
>> 6) & 0xf;
16128 mips32_op
= OPC_ADD
;
16131 mips32_op
= OPC_ADDU
;
16134 mips32_op
= OPC_SUB
;
16137 mips32_op
= OPC_SUBU
;
16140 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16141 mips32_op
= OPC_MUL
;
16143 gen_arith(ctx
, mips32_op
, rd
, rs
, rt
);
16147 mips32_op
= OPC_SLLV
;
16150 mips32_op
= OPC_SRLV
;
16153 mips32_op
= OPC_SRAV
;
16156 mips32_op
= OPC_ROTRV
;
16158 gen_shift(ctx
, mips32_op
, rd
, rs
, rt
);
16160 /* Logical operations */
16162 mips32_op
= OPC_AND
;
16165 mips32_op
= OPC_OR
;
16168 mips32_op
= OPC_NOR
;
16171 mips32_op
= OPC_XOR
;
16173 gen_logic(ctx
, mips32_op
, rd
, rs
, rt
);
16175 /* Set less than */
16177 mips32_op
= OPC_SLT
;
16180 mips32_op
= OPC_SLTU
;
16182 gen_slt(ctx
, mips32_op
, rd
, rs
, rt
);
16185 goto pool32a_invalid
;
16189 minor
= (ctx
->opcode
>> 6) & 0xf;
16191 /* Conditional moves */
16192 case MOVN
: /* MUL */
16193 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
16195 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
16198 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
16201 case MOVZ
: /* MUH */
16202 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
16204 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
16207 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
16211 check_insn(ctx
, ISA_MIPS_R6
);
16212 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
16215 check_insn(ctx
, ISA_MIPS_R6
);
16216 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
16218 case LWXS
: /* DIV */
16219 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
16221 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
16224 gen_ldxs(ctx
, rs
, rt
, rd
);
16228 check_insn(ctx
, ISA_MIPS_R6
);
16229 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
16232 check_insn(ctx
, ISA_MIPS_R6
);
16233 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
16236 check_insn(ctx
, ISA_MIPS_R6
);
16237 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
16240 goto pool32a_invalid
;
16244 gen_bitops(ctx
, OPC_INS
, rt
, rs
, rr
, rd
);
16247 check_insn(ctx
, ISA_MIPS_R6
);
16248 gen_lsa(ctx
, rd
, rt
, rs
, extract32(ctx
->opcode
, 9, 2));
16251 check_insn(ctx
, ISA_MIPS_R6
);
16252 gen_align(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 9, 2));
16255 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, rr
, rd
);
16258 gen_pool32axf(env
, ctx
, rt
, rs
);
16261 generate_exception_end(ctx
, EXCP_BREAK
);
16264 check_insn(ctx
, ISA_MIPS_R6
);
16265 gen_reserved_instruction(ctx
);
16269 MIPS_INVAL("pool32a");
16270 gen_reserved_instruction(ctx
);
16275 minor
= (ctx
->opcode
>> 12) & 0xf;
16278 check_cp0_enabled(ctx
);
16279 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
16280 gen_cache_operation(ctx
, rt
, rs
, imm
);
16285 /* COP2: Not implemented. */
16286 generate_exception_err(ctx
, EXCP_CpU
, 2);
16288 #ifdef TARGET_MIPS64
16291 check_insn(ctx
, ISA_MIPS3
);
16292 check_mips_64(ctx
);
16297 gen_ldst_pair(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
16299 #ifdef TARGET_MIPS64
16302 check_insn(ctx
, ISA_MIPS3
);
16303 check_mips_64(ctx
);
16308 gen_ldst_multiple(ctx
, minor
, rt
, rs
, SIMM(ctx
->opcode
, 0, 12));
16311 MIPS_INVAL("pool32b");
16312 gen_reserved_instruction(ctx
);
16317 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
16318 minor
= ctx
->opcode
& 0x3f;
16319 check_cp1_enabled(ctx
);
16322 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16323 mips32_op
= OPC_ALNV_PS
;
16326 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16327 mips32_op
= OPC_MADD_S
;
16330 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16331 mips32_op
= OPC_MADD_D
;
16334 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16335 mips32_op
= OPC_MADD_PS
;
16338 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16339 mips32_op
= OPC_MSUB_S
;
16342 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16343 mips32_op
= OPC_MSUB_D
;
16346 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16347 mips32_op
= OPC_MSUB_PS
;
16350 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16351 mips32_op
= OPC_NMADD_S
;
16354 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16355 mips32_op
= OPC_NMADD_D
;
16358 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16359 mips32_op
= OPC_NMADD_PS
;
16362 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16363 mips32_op
= OPC_NMSUB_S
;
16366 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16367 mips32_op
= OPC_NMSUB_D
;
16370 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16371 mips32_op
= OPC_NMSUB_PS
;
16373 gen_flt3_arith(ctx
, mips32_op
, rd
, rr
, rs
, rt
);
16375 case CABS_COND_FMT
:
16376 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16377 cond
= (ctx
->opcode
>> 6) & 0xf;
16378 cc
= (ctx
->opcode
>> 13) & 0x7;
16379 fmt
= (ctx
->opcode
>> 10) & 0x3;
16382 gen_cmpabs_s(ctx
, cond
, rt
, rs
, cc
);
16385 gen_cmpabs_d(ctx
, cond
, rt
, rs
, cc
);
16388 gen_cmpabs_ps(ctx
, cond
, rt
, rs
, cc
);
16391 goto pool32f_invalid
;
16395 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16396 cond
= (ctx
->opcode
>> 6) & 0xf;
16397 cc
= (ctx
->opcode
>> 13) & 0x7;
16398 fmt
= (ctx
->opcode
>> 10) & 0x3;
16401 gen_cmp_s(ctx
, cond
, rt
, rs
, cc
);
16404 gen_cmp_d(ctx
, cond
, rt
, rs
, cc
);
16407 gen_cmp_ps(ctx
, cond
, rt
, rs
, cc
);
16410 goto pool32f_invalid
;
16414 check_insn(ctx
, ISA_MIPS_R6
);
16415 gen_r6_cmp_s(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
16418 check_insn(ctx
, ISA_MIPS_R6
);
16419 gen_r6_cmp_d(ctx
, (ctx
->opcode
>> 6) & 0x1f, rt
, rs
, rd
);
16422 gen_pool32fxf(ctx
, rt
, rs
);
16426 switch ((ctx
->opcode
>> 6) & 0x7) {
16428 mips32_op
= OPC_PLL_PS
;
16431 mips32_op
= OPC_PLU_PS
;
16434 mips32_op
= OPC_PUL_PS
;
16437 mips32_op
= OPC_PUU_PS
;
16440 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16441 mips32_op
= OPC_CVT_PS_S
;
16443 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16446 goto pool32f_invalid
;
16450 check_insn(ctx
, ISA_MIPS_R6
);
16451 switch ((ctx
->opcode
>> 9) & 0x3) {
16453 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
16456 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
16459 goto pool32f_invalid
;
16464 switch ((ctx
->opcode
>> 6) & 0x7) {
16466 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16467 mips32_op
= OPC_LWXC1
;
16470 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16471 mips32_op
= OPC_SWXC1
;
16474 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16475 mips32_op
= OPC_LDXC1
;
16478 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16479 mips32_op
= OPC_SDXC1
;
16482 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16483 mips32_op
= OPC_LUXC1
;
16486 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16487 mips32_op
= OPC_SUXC1
;
16489 gen_flt3_ldst(ctx
, mips32_op
, rd
, rd
, rt
, rs
);
16492 goto pool32f_invalid
;
16496 check_insn(ctx
, ISA_MIPS_R6
);
16497 switch ((ctx
->opcode
>> 9) & 0x3) {
16499 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
16502 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
16505 goto pool32f_invalid
;
16510 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16511 fmt
= (ctx
->opcode
>> 9) & 0x3;
16512 switch ((ctx
->opcode
>> 6) & 0x7) {
16516 mips32_op
= OPC_RSQRT2_S
;
16519 mips32_op
= OPC_RSQRT2_D
;
16522 mips32_op
= OPC_RSQRT2_PS
;
16525 goto pool32f_invalid
;
16531 mips32_op
= OPC_RECIP2_S
;
16534 mips32_op
= OPC_RECIP2_D
;
16537 mips32_op
= OPC_RECIP2_PS
;
16540 goto pool32f_invalid
;
16544 mips32_op
= OPC_ADDR_PS
;
16547 mips32_op
= OPC_MULR_PS
;
16549 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16552 goto pool32f_invalid
;
16556 /* MOV[FT].fmt, PREFX, RINT.fmt, CLASS.fmt*/
16557 cc
= (ctx
->opcode
>> 13) & 0x7;
16558 fmt
= (ctx
->opcode
>> 9) & 0x3;
16559 switch ((ctx
->opcode
>> 6) & 0x7) {
16560 case MOVF_FMT
: /* RINT_FMT */
16561 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
16565 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
16568 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
16571 goto pool32f_invalid
;
16577 gen_movcf_s(ctx
, rs
, rt
, cc
, 0);
16580 gen_movcf_d(ctx
, rs
, rt
, cc
, 0);
16584 gen_movcf_ps(ctx
, rs
, rt
, cc
, 0);
16587 goto pool32f_invalid
;
16591 case MOVT_FMT
: /* CLASS_FMT */
16592 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
16596 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
16599 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
16602 goto pool32f_invalid
;
16608 gen_movcf_s(ctx
, rs
, rt
, cc
, 1);
16611 gen_movcf_d(ctx
, rs
, rt
, cc
, 1);
16615 gen_movcf_ps(ctx
, rs
, rt
, cc
, 1);
16618 goto pool32f_invalid
;
16623 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16626 goto pool32f_invalid
;
16629 #define FINSN_3ARG_SDPS(prfx) \
16630 switch ((ctx->opcode >> 8) & 0x3) { \
16632 mips32_op = OPC_##prfx##_S; \
16635 mips32_op = OPC_##prfx##_D; \
16637 case FMT_SDPS_PS: \
16639 mips32_op = OPC_##prfx##_PS; \
16642 goto pool32f_invalid; \
16645 check_insn(ctx
, ISA_MIPS_R6
);
16646 switch ((ctx
->opcode
>> 9) & 0x3) {
16648 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
16651 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
16654 goto pool32f_invalid
;
16658 check_insn(ctx
, ISA_MIPS_R6
);
16659 switch ((ctx
->opcode
>> 9) & 0x3) {
16661 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
16664 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
16667 goto pool32f_invalid
;
16671 /* regular FP ops */
16672 switch ((ctx
->opcode
>> 6) & 0x3) {
16674 FINSN_3ARG_SDPS(ADD
);
16677 FINSN_3ARG_SDPS(SUB
);
16680 FINSN_3ARG_SDPS(MUL
);
16683 fmt
= (ctx
->opcode
>> 8) & 0x3;
16685 mips32_op
= OPC_DIV_D
;
16686 } else if (fmt
== 0) {
16687 mips32_op
= OPC_DIV_S
;
16689 goto pool32f_invalid
;
16693 goto pool32f_invalid
;
16698 switch ((ctx
->opcode
>> 6) & 0x7) {
16699 case MOVN_FMT
: /* SELEQZ_FMT */
16700 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
16702 switch ((ctx
->opcode
>> 9) & 0x3) {
16704 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
16707 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
16710 goto pool32f_invalid
;
16714 FINSN_3ARG_SDPS(MOVN
);
16718 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16719 FINSN_3ARG_SDPS(MOVN
);
16721 case MOVZ_FMT
: /* SELNEZ_FMT */
16722 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
16724 switch ((ctx
->opcode
>> 9) & 0x3) {
16726 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
16729 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
16732 goto pool32f_invalid
;
16736 FINSN_3ARG_SDPS(MOVZ
);
16740 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16741 FINSN_3ARG_SDPS(MOVZ
);
16744 check_insn(ctx
, ISA_MIPS_R6
);
16745 switch ((ctx
->opcode
>> 9) & 0x3) {
16747 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
16750 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
16753 goto pool32f_invalid
;
16757 check_insn(ctx
, ISA_MIPS_R6
);
16758 switch ((ctx
->opcode
>> 9) & 0x3) {
16760 mips32_op
= OPC_MADDF_S
;
16763 mips32_op
= OPC_MADDF_D
;
16766 goto pool32f_invalid
;
16770 check_insn(ctx
, ISA_MIPS_R6
);
16771 switch ((ctx
->opcode
>> 9) & 0x3) {
16773 mips32_op
= OPC_MSUBF_S
;
16776 mips32_op
= OPC_MSUBF_D
;
16779 goto pool32f_invalid
;
16783 goto pool32f_invalid
;
16787 gen_farith(ctx
, mips32_op
, rt
, rs
, rd
, 0);
16791 MIPS_INVAL("pool32f");
16792 gen_reserved_instruction(ctx
);
16796 generate_exception_err(ctx
, EXCP_CpU
, 1);
16800 minor
= (ctx
->opcode
>> 21) & 0x1f;
16803 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16804 gen_compute_branch(ctx
, OPC_BLTZ
, 4, rs
, -1, imm
<< 1, 4);
16807 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16808 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 4);
16809 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16812 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16813 gen_compute_branch(ctx
, OPC_BLTZAL
, 4, rs
, -1, imm
<< 1, 2);
16814 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16817 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16818 gen_compute_branch(ctx
, OPC_BGEZ
, 4, rs
, -1, imm
<< 1, 4);
16821 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16822 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 4);
16823 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16826 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16827 gen_compute_branch(ctx
, OPC_BGEZAL
, 4, rs
, -1, imm
<< 1, 2);
16828 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
16831 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16832 gen_compute_branch(ctx
, OPC_BLEZ
, 4, rs
, -1, imm
<< 1, 4);
16835 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16836 gen_compute_branch(ctx
, OPC_BGTZ
, 4, rs
, -1, imm
<< 1, 4);
16840 case TLTI
: /* BC1EQZC */
16841 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
16843 check_cp1_enabled(ctx
);
16844 gen_compute_branch1_r6(ctx
, OPC_BC1EQZ
, rs
, imm
<< 1, 0);
16847 mips32_op
= OPC_TLTI
;
16851 case TGEI
: /* BC1NEZC */
16852 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
16854 check_cp1_enabled(ctx
);
16855 gen_compute_branch1_r6(ctx
, OPC_BC1NEZ
, rs
, imm
<< 1, 0);
16858 mips32_op
= OPC_TGEI
;
16863 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16864 mips32_op
= OPC_TLTIU
;
16867 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16868 mips32_op
= OPC_TGEIU
;
16870 case TNEI
: /* SYNCI */
16871 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
16874 * Break the TB to be able to sync copied instructions
16877 ctx
->base
.is_jmp
= DISAS_STOP
;
16880 mips32_op
= OPC_TNEI
;
16885 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16886 mips32_op
= OPC_TEQI
;
16888 gen_trap(ctx
, mips32_op
, rs
, -1, imm
);
16893 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16894 gen_compute_branch(ctx
, minor
== BNEZC
? OPC_BNE
: OPC_BEQ
,
16895 4, rs
, 0, imm
<< 1, 0);
16897 * Compact branches don't have a delay slot, so just let
16898 * the normal delay slot handling take us to the branch
16903 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16904 gen_logic_imm(ctx
, OPC_LUI
, rs
, 0, imm
);
16907 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16909 * Break the TB to be able to sync copied instructions
16912 ctx
->base
.is_jmp
= DISAS_STOP
;
16916 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16917 /* COP2: Not implemented. */
16918 generate_exception_err(ctx
, EXCP_CpU
, 2);
16921 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16922 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1FANY2
: OPC_BC1F
;
16925 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16926 mips32_op
= (ctx
->opcode
& (1 << 16)) ? OPC_BC1TANY2
: OPC_BC1T
;
16929 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16930 mips32_op
= OPC_BC1FANY4
;
16933 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16934 mips32_op
= OPC_BC1TANY4
;
16937 check_insn(ctx
, ASE_MIPS3D
);
16940 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
16941 check_cp1_enabled(ctx
);
16942 gen_compute_branch1(ctx
, mips32_op
,
16943 (ctx
->opcode
>> 18) & 0x7, imm
<< 1);
16945 generate_exception_err(ctx
, EXCP_CpU
, 1);
16950 /* MIPS DSP: not implemented */
16953 MIPS_INVAL("pool32i");
16954 gen_reserved_instruction(ctx
);
16959 minor
= (ctx
->opcode
>> 12) & 0xf;
16960 offset
= sextract32(ctx
->opcode
, 0,
16961 (ctx
->insn_flags
& ISA_MIPS_R6
) ? 9 : 12);
16964 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16965 mips32_op
= OPC_LWL
;
16968 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16969 mips32_op
= OPC_SWL
;
16972 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16973 mips32_op
= OPC_LWR
;
16976 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16977 mips32_op
= OPC_SWR
;
16979 #if defined(TARGET_MIPS64)
16981 check_insn(ctx
, ISA_MIPS3
);
16982 check_mips_64(ctx
);
16983 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16984 mips32_op
= OPC_LDL
;
16987 check_insn(ctx
, ISA_MIPS3
);
16988 check_mips_64(ctx
);
16989 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16990 mips32_op
= OPC_SDL
;
16993 check_insn(ctx
, ISA_MIPS3
);
16994 check_mips_64(ctx
);
16995 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
16996 mips32_op
= OPC_LDR
;
16999 check_insn(ctx
, ISA_MIPS3
);
17000 check_mips_64(ctx
);
17001 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
17002 mips32_op
= OPC_SDR
;
17005 check_insn(ctx
, ISA_MIPS3
);
17006 check_mips_64(ctx
);
17007 mips32_op
= OPC_LWU
;
17010 check_insn(ctx
, ISA_MIPS3
);
17011 check_mips_64(ctx
);
17012 mips32_op
= OPC_LLD
;
17016 mips32_op
= OPC_LL
;
17019 gen_ld(ctx
, mips32_op
, rt
, rs
, offset
);
17022 gen_st(ctx
, mips32_op
, rt
, rs
, offset
);
17025 gen_st_cond(ctx
, rt
, rs
, offset
, MO_TESL
, false);
17027 #if defined(TARGET_MIPS64)
17029 check_insn(ctx
, ISA_MIPS3
);
17030 check_mips_64(ctx
);
17031 gen_st_cond(ctx
, rt
, rs
, offset
, MO_TEQ
, false);
17036 MIPS_INVAL("pool32c ld-eva");
17037 gen_reserved_instruction(ctx
);
17040 check_cp0_enabled(ctx
);
17042 minor2
= (ctx
->opcode
>> 9) & 0x7;
17043 offset
= sextract32(ctx
->opcode
, 0, 9);
17046 mips32_op
= OPC_LBUE
;
17049 mips32_op
= OPC_LHUE
;
17052 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
17053 mips32_op
= OPC_LWLE
;
17056 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
17057 mips32_op
= OPC_LWRE
;
17060 mips32_op
= OPC_LBE
;
17063 mips32_op
= OPC_LHE
;
17066 mips32_op
= OPC_LLE
;
17069 mips32_op
= OPC_LWE
;
17075 MIPS_INVAL("pool32c st-eva");
17076 gen_reserved_instruction(ctx
);
17079 check_cp0_enabled(ctx
);
17081 minor2
= (ctx
->opcode
>> 9) & 0x7;
17082 offset
= sextract32(ctx
->opcode
, 0, 9);
17085 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
17086 mips32_op
= OPC_SWLE
;
17089 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
17090 mips32_op
= OPC_SWRE
;
17093 /* Treat as no-op */
17094 if ((ctx
->insn_flags
& ISA_MIPS_R6
) && (rt
>= 24)) {
17095 /* hint codes 24-31 are reserved and signal RI */
17096 generate_exception(ctx
, EXCP_RI
);
17100 /* Treat as no-op */
17101 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
17102 gen_cache_operation(ctx
, rt
, rs
, offset
);
17106 mips32_op
= OPC_SBE
;
17109 mips32_op
= OPC_SHE
;
17112 gen_st_cond(ctx
, rt
, rs
, offset
, MO_TESL
, true);
17115 mips32_op
= OPC_SWE
;
17120 /* Treat as no-op */
17121 if ((ctx
->insn_flags
& ISA_MIPS_R6
) && (rt
>= 24)) {
17122 /* hint codes 24-31 are reserved and signal RI */
17123 generate_exception(ctx
, EXCP_RI
);
17127 MIPS_INVAL("pool32c");
17128 gen_reserved_instruction(ctx
);
17132 case ADDI32
: /* AUI, LUI */
17133 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
17135 gen_logic_imm(ctx
, OPC_LUI
, rt
, rs
, imm
);
17138 mips32_op
= OPC_ADDI
;
17143 mips32_op
= OPC_ADDIU
;
17145 gen_arith_imm(ctx
, mips32_op
, rt
, rs
, imm
);
17148 /* Logical operations */
17150 mips32_op
= OPC_ORI
;
17153 mips32_op
= OPC_XORI
;
17156 mips32_op
= OPC_ANDI
;
17158 gen_logic_imm(ctx
, mips32_op
, rt
, rs
, imm
);
17161 /* Set less than immediate */
17163 mips32_op
= OPC_SLTI
;
17166 mips32_op
= OPC_SLTIU
;
17168 gen_slt_imm(ctx
, mips32_op
, rt
, rs
, imm
);
17171 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
17172 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
17173 gen_compute_branch(ctx
, OPC_JALX
, 4, rt
, rs
, offset
, 4);
17174 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
17176 case JALS32
: /* BOVC, BEQC, BEQZALC */
17177 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
17180 mips32_op
= OPC_BOVC
;
17181 } else if (rs
< rt
&& rs
== 0) {
17183 mips32_op
= OPC_BEQZALC
;
17186 mips32_op
= OPC_BEQC
;
17188 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17191 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1;
17192 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
, offset
, 2);
17193 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
17196 case BEQ32
: /* BC */
17197 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
17199 gen_compute_compact_branch(ctx
, OPC_BC
, 0, 0,
17200 sextract32(ctx
->opcode
<< 1, 0, 27));
17203 gen_compute_branch(ctx
, OPC_BEQ
, 4, rt
, rs
, imm
<< 1, 4);
17206 case BNE32
: /* BALC */
17207 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
17209 gen_compute_compact_branch(ctx
, OPC_BALC
, 0, 0,
17210 sextract32(ctx
->opcode
<< 1, 0, 27));
17213 gen_compute_branch(ctx
, OPC_BNE
, 4, rt
, rs
, imm
<< 1, 4);
17216 case J32
: /* BGTZC, BLTZC, BLTC */
17217 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
17218 if (rs
== 0 && rt
!= 0) {
17220 mips32_op
= OPC_BGTZC
;
17221 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17223 mips32_op
= OPC_BLTZC
;
17226 mips32_op
= OPC_BLTC
;
17228 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17231 gen_compute_branch(ctx
, OPC_J
, 4, rt
, rs
,
17232 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
17235 case JAL32
: /* BLEZC, BGEZC, BGEC */
17236 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
17237 if (rs
== 0 && rt
!= 0) {
17239 mips32_op
= OPC_BLEZC
;
17240 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17242 mips32_op
= OPC_BGEZC
;
17245 mips32_op
= OPC_BGEC
;
17247 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17250 gen_compute_branch(ctx
, OPC_JAL
, 4, rt
, rs
,
17251 (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 1, 4);
17252 ctx
->hflags
|= MIPS_HFLAG_BDS_STRICT
;
17255 /* Floating point (COP1) */
17257 mips32_op
= OPC_LWC1
;
17260 mips32_op
= OPC_LDC1
;
17263 mips32_op
= OPC_SWC1
;
17266 mips32_op
= OPC_SDC1
;
17268 gen_cop1_ldst(ctx
, mips32_op
, rt
, rs
, imm
);
17270 case ADDIUPC
: /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
17271 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
17272 /* PCREL: ADDIUPC, AUIPC, ALUIPC, LWPC */
17273 switch ((ctx
->opcode
>> 16) & 0x1f) {
17282 gen_pcrel(ctx
, OPC_ADDIUPC
, ctx
->base
.pc_next
& ~0x3, rt
);
17285 gen_pcrel(ctx
, OPC_AUIPC
, ctx
->base
.pc_next
, rt
);
17288 gen_pcrel(ctx
, OPC_ALUIPC
, ctx
->base
.pc_next
, rt
);
17298 gen_pcrel(ctx
, R6_OPC_LWPC
, ctx
->base
.pc_next
& ~0x3, rt
);
17301 generate_exception(ctx
, EXCP_RI
);
17306 int reg
= mmreg(ZIMM(ctx
->opcode
, 23, 3));
17307 offset
= SIMM(ctx
->opcode
, 0, 23) << 2;
17309 gen_addiupc(ctx
, reg
, offset
, 0, 0);
17312 case BNVC
: /* BNEC, BNEZALC */
17313 check_insn(ctx
, ISA_MIPS_R6
);
17316 mips32_op
= OPC_BNVC
;
17317 } else if (rs
< rt
&& rs
== 0) {
17319 mips32_op
= OPC_BNEZALC
;
17322 mips32_op
= OPC_BNEC
;
17324 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17326 case R6_BNEZC
: /* JIALC */
17327 check_insn(ctx
, ISA_MIPS_R6
);
17330 gen_compute_compact_branch(ctx
, OPC_BNEZC
, rt
, 0,
17331 sextract32(ctx
->opcode
<< 1, 0, 22));
17334 gen_compute_compact_branch(ctx
, OPC_JIALC
, 0, rs
, imm
);
17337 case R6_BEQZC
: /* JIC */
17338 check_insn(ctx
, ISA_MIPS_R6
);
17341 gen_compute_compact_branch(ctx
, OPC_BEQZC
, rt
, 0,
17342 sextract32(ctx
->opcode
<< 1, 0, 22));
17345 gen_compute_compact_branch(ctx
, OPC_JIC
, 0, rs
, imm
);
17348 case BLEZALC
: /* BGEZALC, BGEUC */
17349 check_insn(ctx
, ISA_MIPS_R6
);
17350 if (rs
== 0 && rt
!= 0) {
17352 mips32_op
= OPC_BLEZALC
;
17353 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17355 mips32_op
= OPC_BGEZALC
;
17358 mips32_op
= OPC_BGEUC
;
17360 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17362 case BGTZALC
: /* BLTZALC, BLTUC */
17363 check_insn(ctx
, ISA_MIPS_R6
);
17364 if (rs
== 0 && rt
!= 0) {
17366 mips32_op
= OPC_BGTZALC
;
17367 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
17369 mips32_op
= OPC_BLTZALC
;
17372 mips32_op
= OPC_BLTUC
;
17374 gen_compute_compact_branch(ctx
, mips32_op
, rs
, rt
, imm
<< 1);
17376 /* Loads and stores */
17378 mips32_op
= OPC_LB
;
17381 mips32_op
= OPC_LBU
;
17384 mips32_op
= OPC_LH
;
17387 mips32_op
= OPC_LHU
;
17390 mips32_op
= OPC_LW
;
17392 #ifdef TARGET_MIPS64
17394 check_insn(ctx
, ISA_MIPS3
);
17395 check_mips_64(ctx
);
17396 mips32_op
= OPC_LD
;
17399 check_insn(ctx
, ISA_MIPS3
);
17400 check_mips_64(ctx
);
17401 mips32_op
= OPC_SD
;
17405 mips32_op
= OPC_SB
;
17408 mips32_op
= OPC_SH
;
17411 mips32_op
= OPC_SW
;
17414 gen_ld(ctx
, mips32_op
, rt
, rs
, imm
);
17417 gen_st(ctx
, mips32_op
, rt
, rs
, imm
);
17420 gen_reserved_instruction(ctx
);
17425 static int decode_micromips_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
17429 /* make sure instructions are on a halfword boundary */
17430 if (ctx
->base
.pc_next
& 0x1) {
17431 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
17432 generate_exception_end(ctx
, EXCP_AdEL
);
17436 op
= (ctx
->opcode
>> 10) & 0x3f;
17437 /* Enforce properly-sized instructions in a delay slot */
17438 if (ctx
->hflags
& MIPS_HFLAG_BDS_STRICT
) {
17439 switch (op
& 0x7) { /* MSB-3..MSB-5 */
17441 /* POOL32A, POOL32B, POOL32I, POOL32C */
17443 /* ADDI32, ADDIU32, ORI32, XORI32, SLTI32, SLTIU32, ANDI32, JALX32 */
17445 /* LBU32, LHU32, POOL32F, JALS32, BEQ32, BNE32, J32, JAL32 */
17447 /* SB32, SH32, ADDIUPC, SWC132, SDC132, SW32 */
17449 /* LB32, LH32, LWC132, LDC132, LW32 */
17450 if (ctx
->hflags
& MIPS_HFLAG_BDS16
) {
17451 gen_reserved_instruction(ctx
);
17456 /* POOL16A, POOL16B, POOL16C, LWGP16, POOL16F */
17458 /* LBU16, LHU16, LWSP16, LW16, SB16, SH16, SWSP16, SW16 */
17460 /* MOVE16, ANDI16, POOL16D, POOL16E, BEQZ16, BNEZ16, B16, LI16 */
17461 if (ctx
->hflags
& MIPS_HFLAG_BDS32
) {
17462 gen_reserved_instruction(ctx
);
17472 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17473 int rs1
= mmreg(uMIPS_RS1(ctx
->opcode
));
17474 int rs2
= mmreg(uMIPS_RS2(ctx
->opcode
));
17477 switch (ctx
->opcode
& 0x1) {
17485 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
17487 * In the Release 6, the register number location in
17488 * the instruction encoding has changed.
17490 gen_arith(ctx
, opc
, rs1
, rd
, rs2
);
17492 gen_arith(ctx
, opc
, rd
, rs1
, rs2
);
17498 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17499 int rs
= mmreg(uMIPS_RS(ctx
->opcode
));
17500 int amount
= (ctx
->opcode
>> 1) & 0x7;
17502 amount
= amount
== 0 ? 8 : amount
;
17504 switch (ctx
->opcode
& 0x1) {
17513 gen_shift_imm(ctx
, opc
, rd
, rs
, amount
);
17517 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
17518 gen_pool16c_r6_insn(ctx
);
17520 gen_pool16c_insn(ctx
);
17525 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17526 int rb
= 28; /* GP */
17527 int16_t offset
= SIMM(ctx
->opcode
, 0, 7) << 2;
17529 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17533 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
17534 if (ctx
->opcode
& 1) {
17535 gen_reserved_instruction(ctx
);
17538 int enc_dest
= uMIPS_RD(ctx
->opcode
);
17539 int enc_rt
= uMIPS_RS2(ctx
->opcode
);
17540 int enc_rs
= uMIPS_RS1(ctx
->opcode
);
17541 gen_movep(ctx
, enc_dest
, enc_rt
, enc_rs
);
17546 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17547 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17548 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17549 offset
= (offset
== 0xf ? -1 : offset
);
17551 gen_ld(ctx
, OPC_LBU
, rd
, rb
, offset
);
17556 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17557 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17558 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17560 gen_ld(ctx
, OPC_LHU
, rd
, rb
, offset
);
17565 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17566 int rb
= 29; /* SP */
17567 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17569 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17574 int rd
= mmreg(uMIPS_RD(ctx
->opcode
));
17575 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17576 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17578 gen_ld(ctx
, OPC_LW
, rd
, rb
, offset
);
17583 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17584 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17585 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4);
17587 gen_st(ctx
, OPC_SB
, rd
, rb
, offset
);
17592 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17593 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17594 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 1;
17596 gen_st(ctx
, OPC_SH
, rd
, rb
, offset
);
17601 int rd
= (ctx
->opcode
>> 5) & 0x1f;
17602 int rb
= 29; /* SP */
17603 int16_t offset
= ZIMM(ctx
->opcode
, 0, 5) << 2;
17605 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17610 int rd
= mmreg2(uMIPS_RD(ctx
->opcode
));
17611 int rb
= mmreg(uMIPS_RS(ctx
->opcode
));
17612 int16_t offset
= ZIMM(ctx
->opcode
, 0, 4) << 2;
17614 gen_st(ctx
, OPC_SW
, rd
, rb
, offset
);
17619 int rd
= uMIPS_RD5(ctx
->opcode
);
17620 int rs
= uMIPS_RS5(ctx
->opcode
);
17622 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, 0);
17629 switch (ctx
->opcode
& 0x1) {
17639 switch (ctx
->opcode
& 0x1) {
17644 gen_addiur1sp(ctx
);
17648 case B16
: /* BC16 */
17649 gen_compute_branch(ctx
, OPC_BEQ
, 2, 0, 0,
17650 sextract32(ctx
->opcode
, 0, 10) << 1,
17651 (ctx
->insn_flags
& ISA_MIPS_R6
) ? 0 : 4);
17653 case BNEZ16
: /* BNEZC16 */
17654 case BEQZ16
: /* BEQZC16 */
17655 gen_compute_branch(ctx
, op
== BNEZ16
? OPC_BNE
: OPC_BEQ
, 2,
17656 mmreg(uMIPS_RD(ctx
->opcode
)),
17657 0, sextract32(ctx
->opcode
, 0, 7) << 1,
17658 (ctx
->insn_flags
& ISA_MIPS_R6
) ? 0 : 4);
17663 int reg
= mmreg(uMIPS_RD(ctx
->opcode
));
17664 int imm
= ZIMM(ctx
->opcode
, 0, 7);
17666 imm
= (imm
== 0x7f ? -1 : imm
);
17667 tcg_gen_movi_tl(cpu_gpr
[reg
], imm
);
17673 gen_reserved_instruction(ctx
);
17676 decode_micromips32_opc(env
, ctx
);
17689 /* MAJOR, P16, and P32 pools opcodes */
17693 NM_MOVE_BALC
= 0x02,
17701 NM_P16_SHIFT
= 0x0c,
17719 NM_P_LS_U12
= 0x21,
17729 NM_P16_ADDU
= 0x2c,
17743 NM_MOVEPREV
= 0x3f,
17746 /* POOL32A instruction pool */
17748 NM_POOL32A0
= 0x00,
17749 NM_SPECIAL2
= 0x01,
17752 NM_POOL32A5
= 0x05,
17753 NM_POOL32A7
= 0x07,
17756 /* P.GP.W instruction pool */
17758 NM_ADDIUGP_W
= 0x00,
17763 /* P48I instruction pool */
17767 NM_ADDIUGP48
= 0x02,
17768 NM_ADDIUPC48
= 0x03,
17773 /* P.U12 instruction pool */
17782 NM_ADDIUNEG
= 0x08,
17789 /* POOL32F instruction pool */
17791 NM_POOL32F_0
= 0x00,
17792 NM_POOL32F_3
= 0x03,
17793 NM_POOL32F_5
= 0x05,
17796 /* POOL32S instruction pool */
17798 NM_POOL32S_0
= 0x00,
17799 NM_POOL32S_4
= 0x04,
17802 /* P.LUI instruction pool */
17808 /* P.GP.BH instruction pool */
17813 NM_ADDIUGP_B
= 0x03,
17816 NM_P_GP_CP1
= 0x06,
17819 /* P.LS.U12 instruction pool */
17824 NM_P_PREFU12
= 0x03,
17837 /* P.LS.S9 instruction pool */
17843 NM_P_LS_UAWM
= 0x05,
17846 /* P.BAL instruction pool */
17852 /* P.J instruction pool */
17855 NM_JALRC_HB
= 0x01,
17856 NM_P_BALRSC
= 0x08,
17859 /* P.BR1 instruction pool */
17867 /* P.BR2 instruction pool */
17874 /* P.BRI instruction pool */
17886 /* P16.SHIFT instruction pool */
17892 /* POOL16C instruction pool */
17894 NM_POOL16C_0
= 0x00,
17898 /* P16.A1 instruction pool */
17900 NM_ADDIUR1SP
= 0x01,
17903 /* P16.A2 instruction pool */
17906 NM_P_ADDIURS5
= 0x01,
17909 /* P16.ADDU instruction pool */
17915 /* P16.SR instruction pool */
17918 NM_RESTORE_JRC16
= 0x01,
17921 /* P16.4X4 instruction pool */
17927 /* P16.LB instruction pool */
17934 /* P16.LH instruction pool */
17941 /* P.RI instruction pool */
17944 NM_P_SYSCALL
= 0x01,
17949 /* POOL32A0 instruction pool */
17984 NM_D_E_MT_VPE
= 0x56,
17992 /* CRC32 instruction pool */
18002 /* POOL32A5 instruction pool */
18004 NM_CMP_EQ_PH
= 0x00,
18005 NM_CMP_LT_PH
= 0x08,
18006 NM_CMP_LE_PH
= 0x10,
18007 NM_CMPGU_EQ_QB
= 0x18,
18008 NM_CMPGU_LT_QB
= 0x20,
18009 NM_CMPGU_LE_QB
= 0x28,
18010 NM_CMPGDU_EQ_QB
= 0x30,
18011 NM_CMPGDU_LT_QB
= 0x38,
18012 NM_CMPGDU_LE_QB
= 0x40,
18013 NM_CMPU_EQ_QB
= 0x48,
18014 NM_CMPU_LT_QB
= 0x50,
18015 NM_CMPU_LE_QB
= 0x58,
18016 NM_ADDQ_S_W
= 0x60,
18017 NM_SUBQ_S_W
= 0x68,
18021 NM_ADDQ_S_PH
= 0x01,
18022 NM_ADDQH_R_PH
= 0x09,
18023 NM_ADDQH_R_W
= 0x11,
18024 NM_ADDU_S_QB
= 0x19,
18025 NM_ADDU_S_PH
= 0x21,
18026 NM_ADDUH_R_QB
= 0x29,
18027 NM_SHRAV_R_PH
= 0x31,
18028 NM_SHRAV_R_QB
= 0x39,
18029 NM_SUBQ_S_PH
= 0x41,
18030 NM_SUBQH_R_PH
= 0x49,
18031 NM_SUBQH_R_W
= 0x51,
18032 NM_SUBU_S_QB
= 0x59,
18033 NM_SUBU_S_PH
= 0x61,
18034 NM_SUBUH_R_QB
= 0x69,
18035 NM_SHLLV_S_PH
= 0x71,
18036 NM_PRECR_SRA_R_PH_W
= 0x79,
18038 NM_MULEU_S_PH_QBL
= 0x12,
18039 NM_MULEU_S_PH_QBR
= 0x1a,
18040 NM_MULQ_RS_PH
= 0x22,
18041 NM_MULQ_S_PH
= 0x2a,
18042 NM_MULQ_RS_W
= 0x32,
18043 NM_MULQ_S_W
= 0x3a,
18046 NM_SHRAV_R_W
= 0x5a,
18047 NM_SHRLV_PH
= 0x62,
18048 NM_SHRLV_QB
= 0x6a,
18049 NM_SHLLV_QB
= 0x72,
18050 NM_SHLLV_S_W
= 0x7a,
18054 NM_MULEQ_S_W_PHL
= 0x04,
18055 NM_MULEQ_S_W_PHR
= 0x0c,
18057 NM_MUL_S_PH
= 0x05,
18058 NM_PRECR_QB_PH
= 0x0d,
18059 NM_PRECRQ_QB_PH
= 0x15,
18060 NM_PRECRQ_PH_W
= 0x1d,
18061 NM_PRECRQ_RS_PH_W
= 0x25,
18062 NM_PRECRQU_S_QB_PH
= 0x2d,
18063 NM_PACKRL_PH
= 0x35,
18067 NM_SHRA_R_W
= 0x5e,
18068 NM_SHRA_R_PH
= 0x66,
18069 NM_SHLL_S_PH
= 0x76,
18070 NM_SHLL_S_W
= 0x7e,
18075 /* POOL32A7 instruction pool */
18080 NM_POOL32AXF
= 0x07,
18083 /* P.SR instruction pool */
18089 /* P.SHIFT instruction pool */
18097 /* P.ROTX instruction pool */
18102 /* P.INS instruction pool */
18107 /* P.EXT instruction pool */
18112 /* POOL32F_0 (fmt) instruction pool */
18117 NM_SELEQZ_S
= 0x07,
18118 NM_SELEQZ_D
= 0x47,
18122 NM_SELNEZ_S
= 0x0f,
18123 NM_SELNEZ_D
= 0x4f,
18138 /* POOL32F_3 instruction pool */
18142 NM_MINA_FMT
= 0x04,
18143 NM_MAXA_FMT
= 0x05,
18144 NM_POOL32FXF
= 0x07,
18147 /* POOL32F_5 instruction pool */
18149 NM_CMP_CONDN_S
= 0x00,
18150 NM_CMP_CONDN_D
= 0x02,
18153 /* P.GP.LH instruction pool */
18159 /* P.GP.SH instruction pool */
18164 /* P.GP.CP1 instruction pool */
18172 /* P.LS.S0 instruction pool */
18189 NM_P_PREFS9
= 0x03,
18195 /* P.LS.S1 instruction pool */
18197 NM_ASET_ACLR
= 0x02,
18205 /* P.LS.E0 instruction pool */
18221 /* P.PREFE instruction pool */
18227 /* P.LLE instruction pool */
18233 /* P.SCE instruction pool */
18239 /* P.LS.WM instruction pool */
18245 /* P.LS.UAWM instruction pool */
18251 /* P.BR3A instruction pool */
18257 NM_BPOSGE32C
= 0x04,
18260 /* P16.RI instruction pool */
18262 NM_P16_SYSCALL
= 0x01,
18267 /* POOL16C_0 instruction pool */
18269 NM_POOL16C_00
= 0x00,
18272 /* P16.JRC instruction pool */
18278 /* P.SYSCALL instruction pool */
18284 /* P.TRAP instruction pool */
18290 /* P.CMOVE instruction pool */
18296 /* POOL32Axf instruction pool */
18298 NM_POOL32AXF_1
= 0x01,
18299 NM_POOL32AXF_2
= 0x02,
18300 NM_POOL32AXF_4
= 0x04,
18301 NM_POOL32AXF_5
= 0x05,
18302 NM_POOL32AXF_7
= 0x07,
18305 /* POOL32Axf_1 instruction pool */
18307 NM_POOL32AXF_1_0
= 0x00,
18308 NM_POOL32AXF_1_1
= 0x01,
18309 NM_POOL32AXF_1_3
= 0x03,
18310 NM_POOL32AXF_1_4
= 0x04,
18311 NM_POOL32AXF_1_5
= 0x05,
18312 NM_POOL32AXF_1_7
= 0x07,
18315 /* POOL32Axf_2 instruction pool */
18317 NM_POOL32AXF_2_0_7
= 0x00,
18318 NM_POOL32AXF_2_8_15
= 0x01,
18319 NM_POOL32AXF_2_16_23
= 0x02,
18320 NM_POOL32AXF_2_24_31
= 0x03,
18323 /* POOL32Axf_7 instruction pool */
18325 NM_SHRA_R_QB
= 0x0,
18330 /* POOL32Axf_1_0 instruction pool */
18338 /* POOL32Axf_1_1 instruction pool */
18344 /* POOL32Axf_1_3 instruction pool */
18352 /* POOL32Axf_1_4 instruction pool */
18358 /* POOL32Axf_1_5 instruction pool */
18360 NM_MAQ_S_W_PHR
= 0x0,
18361 NM_MAQ_S_W_PHL
= 0x1,
18362 NM_MAQ_SA_W_PHR
= 0x2,
18363 NM_MAQ_SA_W_PHL
= 0x3,
18366 /* POOL32Axf_1_7 instruction pool */
18370 NM_EXTR_RS_W
= 0x2,
18374 /* POOL32Axf_2_0_7 instruction pool */
18377 NM_DPAQ_S_W_PH
= 0x1,
18379 NM_DPSQ_S_W_PH
= 0x3,
18386 /* POOL32Axf_2_8_15 instruction pool */
18388 NM_DPAX_W_PH
= 0x0,
18389 NM_DPAQ_SA_L_W
= 0x1,
18390 NM_DPSX_W_PH
= 0x2,
18391 NM_DPSQ_SA_L_W
= 0x3,
18394 NM_EXTRV_R_W
= 0x7,
18397 /* POOL32Axf_2_16_23 instruction pool */
18399 NM_DPAU_H_QBL
= 0x0,
18400 NM_DPAQX_S_W_PH
= 0x1,
18401 NM_DPSU_H_QBL
= 0x2,
18402 NM_DPSQX_S_W_PH
= 0x3,
18405 NM_MULSA_W_PH
= 0x6,
18406 NM_EXTRV_RS_W
= 0x7,
18409 /* POOL32Axf_2_24_31 instruction pool */
18411 NM_DPAU_H_QBR
= 0x0,
18412 NM_DPAQX_SA_W_PH
= 0x1,
18413 NM_DPSU_H_QBR
= 0x2,
18414 NM_DPSQX_SA_W_PH
= 0x3,
18417 NM_MULSAQ_S_W_PH
= 0x6,
18418 NM_EXTRV_S_H
= 0x7,
18421 /* POOL32Axf_{4, 5} instruction pool */
18440 /* nanoMIPS DSP instructions */
18441 NM_ABSQ_S_QB
= 0x00,
18442 NM_ABSQ_S_PH
= 0x08,
18443 NM_ABSQ_S_W
= 0x10,
18444 NM_PRECEQ_W_PHL
= 0x28,
18445 NM_PRECEQ_W_PHR
= 0x30,
18446 NM_PRECEQU_PH_QBL
= 0x38,
18447 NM_PRECEQU_PH_QBR
= 0x48,
18448 NM_PRECEU_PH_QBL
= 0x58,
18449 NM_PRECEU_PH_QBR
= 0x68,
18450 NM_PRECEQU_PH_QBLA
= 0x39,
18451 NM_PRECEQU_PH_QBRA
= 0x49,
18452 NM_PRECEU_PH_QBLA
= 0x59,
18453 NM_PRECEU_PH_QBRA
= 0x69,
18454 NM_REPLV_PH
= 0x01,
18455 NM_REPLV_QB
= 0x09,
18458 NM_RADDU_W_QB
= 0x78,
18464 /* PP.SR instruction pool */
18468 NM_RESTORE_JRC
= 0x03,
18471 /* P.SR.F instruction pool */
18474 NM_RESTOREF
= 0x01,
18477 /* P16.SYSCALL instruction pool */
18479 NM_SYSCALL16
= 0x00,
18480 NM_HYPCALL16
= 0x01,
18483 /* POOL16C_00 instruction pool */
18491 /* PP.LSX and PP.LSXS instruction pool */
18529 /* ERETx instruction pool */
18535 /* POOL32FxF_{0, 1} insturction pool */
18544 NM_CVT_S_PL
= 0x84,
18545 NM_CVT_S_PU
= 0xa4,
18547 NM_CVT_L_S
= 0x004,
18548 NM_CVT_L_D
= 0x104,
18549 NM_CVT_W_S
= 0x024,
18550 NM_CVT_W_D
= 0x124,
18552 NM_RSQRT_S
= 0x008,
18553 NM_RSQRT_D
= 0x108,
18558 NM_RECIP_S
= 0x048,
18559 NM_RECIP_D
= 0x148,
18561 NM_FLOOR_L_S
= 0x00c,
18562 NM_FLOOR_L_D
= 0x10c,
18564 NM_FLOOR_W_S
= 0x02c,
18565 NM_FLOOR_W_D
= 0x12c,
18567 NM_CEIL_L_S
= 0x04c,
18568 NM_CEIL_L_D
= 0x14c,
18569 NM_CEIL_W_S
= 0x06c,
18570 NM_CEIL_W_D
= 0x16c,
18571 NM_TRUNC_L_S
= 0x08c,
18572 NM_TRUNC_L_D
= 0x18c,
18573 NM_TRUNC_W_S
= 0x0ac,
18574 NM_TRUNC_W_D
= 0x1ac,
18575 NM_ROUND_L_S
= 0x0cc,
18576 NM_ROUND_L_D
= 0x1cc,
18577 NM_ROUND_W_S
= 0x0ec,
18578 NM_ROUND_W_D
= 0x1ec,
18586 NM_CVT_D_S
= 0x04d,
18587 NM_CVT_D_W
= 0x0cd,
18588 NM_CVT_D_L
= 0x14d,
18589 NM_CVT_S_D
= 0x06d,
18590 NM_CVT_S_W
= 0x0ed,
18591 NM_CVT_S_L
= 0x16d,
18594 /* P.LL instruction pool */
18600 /* P.SC instruction pool */
18606 /* P.DVP instruction pool */
18615 * nanoMIPS decoding engine
18620 /* extraction utilities */
/*
 * Field-extraction utilities for nanoMIPS instruction words.
 *
 * Each macro takes the raw 32-bit opcode and yields the register field
 * it names.  The argument is fully parenthesized so that callers may
 * pass arbitrary expressions (e.g. a conditional expression) without
 * operator-precedence surprises.
 */
#define NANOMIPS_EXTRACT_RT3(op) (((op) >> 7) & 0x7)
#define NANOMIPS_EXTRACT_RS3(op) (((op) >> 4) & 0x7)
#define NANOMIPS_EXTRACT_RD3(op) (((op) >> 1) & 0x7)
#define NANOMIPS_EXTRACT_RD5(op) (((op) >> 5) & 0x1f)
#define NANOMIPS_EXTRACT_RS5(op) ((op) & 0x1f)
18628 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3'). */
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3'):
 * 3-bit encodings 0..3 select $16..$19, encodings 4..7 select $4..$7.
 */
static inline int decode_gpr_gpr3(int r)
{
    int enc = r & 0x7;

    return (enc < 4) ? enc + 16 : enc;
}
18636 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3.src.store'). */
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr3.src.store'):
 * same mapping as 'gpr3' except that encoding 0 names $zero, so a store
 * source of 0 writes the constant zero.
 */
static inline int decode_gpr_gpr3_src_store(int r)
{
    int enc = r & 0x7;

    if (enc == 0) {
        return 0;
    }
    return (enc < 4) ? enc + 16 : enc;
}
18644 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4'). */
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4'):
 * 4-bit encodings 0..3 select $8..$11, 4..7 select $4..$7,
 * and 8..15 select $16..$23.
 */
static inline int decode_gpr_gpr4(int r)
{
    int enc = r & 0xf;

    if (enc >= 4 && enc < 8) {
        return enc;          /* identity range: $4..$7 */
    }
    return enc + 8;          /* 0..3 -> $8..$11, 8..15 -> $16..$23 */
}
18653 /* Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4.zero'). */
/*
 * Implement nanoMIPS pseudocode decode_gpr(encoded_gpr, 'gpr4.zero'):
 * identical to 'gpr4' except that encoding 3 names $zero instead of $11.
 */
static inline int decode_gpr_gpr4_zero(int r)
{
    int enc = r & 0xf;

    if (enc == 3) {
        return 0;
    }
    if (enc >= 4 && enc < 8) {
        return enc;          /* identity range: $4..$7 */
    }
    return enc + 8;          /* 0..2 -> $8..$10, 8..15 -> $16..$23 */
}
18663 static void gen_adjust_sp(DisasContext
*ctx
, int u
)
18665 gen_op_addr_addi(ctx
, cpu_gpr
[29], cpu_gpr
[29], u
);
18668 static void gen_save(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
18669 uint8_t gp
, uint16_t u
)
18672 TCGv va
= tcg_temp_new();
18673 TCGv t0
= tcg_temp_new();
18675 while (counter
!= count
) {
18676 bool use_gp
= gp
&& (counter
== count
- 1);
18677 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
18678 int this_offset
= -((counter
+ 1) << 2);
18679 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
18680 gen_load_gpr(t0
, this_rt
);
18681 tcg_gen_qemu_st_tl(t0
, va
, ctx
->mem_idx
,
18682 (MO_TEUL
| ctx
->default_tcg_memop_mask
));
18686 /* adjust stack pointer */
18687 gen_adjust_sp(ctx
, -u
);
18693 static void gen_restore(DisasContext
*ctx
, uint8_t rt
, uint8_t count
,
18694 uint8_t gp
, uint16_t u
)
18697 TCGv va
= tcg_temp_new();
18698 TCGv t0
= tcg_temp_new();
18700 while (counter
!= count
) {
18701 bool use_gp
= gp
&& (counter
== count
- 1);
18702 int this_rt
= use_gp
? 28 : (rt
& 0x10) | ((rt
+ counter
) & 0x1f);
18703 int this_offset
= u
- ((counter
+ 1) << 2);
18704 gen_base_offset_addr(ctx
, va
, 29, this_offset
);
18705 tcg_gen_qemu_ld_tl(t0
, va
, ctx
->mem_idx
, MO_TESL
|
18706 ctx
->default_tcg_memop_mask
);
18707 tcg_gen_ext32s_tl(t0
, t0
);
18708 gen_store_gpr(t0
, this_rt
);
18712 /* adjust stack pointer */
18713 gen_adjust_sp(ctx
, u
);
18719 static void gen_pool16c_nanomips_insn(DisasContext
*ctx
)
18721 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
18722 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS3(ctx
->opcode
));
18724 switch (extract32(ctx
->opcode
, 2, 2)) {
18726 gen_logic(ctx
, OPC_NOR
, rt
, rs
, 0);
18729 gen_logic(ctx
, OPC_AND
, rt
, rt
, rs
);
18732 gen_logic(ctx
, OPC_XOR
, rt
, rt
, rs
);
18735 gen_logic(ctx
, OPC_OR
, rt
, rt
, rs
);
18740 static void gen_pool32a0_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
18742 int rt
= extract32(ctx
->opcode
, 21, 5);
18743 int rs
= extract32(ctx
->opcode
, 16, 5);
18744 int rd
= extract32(ctx
->opcode
, 11, 5);
18746 switch (extract32(ctx
->opcode
, 3, 7)) {
18748 switch (extract32(ctx
->opcode
, 10, 1)) {
18751 gen_trap(ctx
, OPC_TEQ
, rs
, rt
, -1);
18755 gen_trap(ctx
, OPC_TNE
, rs
, rt
, -1);
18761 gen_rdhwr(ctx
, rt
, rs
, extract32(ctx
->opcode
, 11, 3));
18765 gen_bshfl(ctx
, OPC_SEB
, rs
, rt
);
18768 gen_bshfl(ctx
, OPC_SEH
, rs
, rt
);
18771 gen_shift(ctx
, OPC_SLLV
, rd
, rt
, rs
);
18774 gen_shift(ctx
, OPC_SRLV
, rd
, rt
, rs
);
18777 gen_shift(ctx
, OPC_SRAV
, rd
, rt
, rs
);
18780 gen_shift(ctx
, OPC_ROTRV
, rd
, rt
, rs
);
18783 gen_arith(ctx
, OPC_ADD
, rd
, rs
, rt
);
18786 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
18790 gen_arith(ctx
, OPC_SUB
, rd
, rs
, rt
);
18793 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
18796 switch (extract32(ctx
->opcode
, 10, 1)) {
18798 gen_cond_move(ctx
, OPC_MOVZ
, rd
, rs
, rt
);
18801 gen_cond_move(ctx
, OPC_MOVN
, rd
, rs
, rt
);
18806 gen_logic(ctx
, OPC_AND
, rd
, rs
, rt
);
18809 gen_logic(ctx
, OPC_OR
, rd
, rs
, rt
);
18812 gen_logic(ctx
, OPC_NOR
, rd
, rs
, rt
);
18815 gen_logic(ctx
, OPC_XOR
, rd
, rs
, rt
);
18818 gen_slt(ctx
, OPC_SLT
, rd
, rs
, rt
);
18823 #ifndef CONFIG_USER_ONLY
18824 TCGv t0
= tcg_temp_new();
18825 switch (extract32(ctx
->opcode
, 10, 1)) {
18828 check_cp0_enabled(ctx
);
18829 gen_helper_dvp(t0
, cpu_env
);
18830 gen_store_gpr(t0
, rt
);
18835 check_cp0_enabled(ctx
);
18836 gen_helper_evp(t0
, cpu_env
);
18837 gen_store_gpr(t0
, rt
);
18844 gen_slt(ctx
, OPC_SLTU
, rd
, rs
, rt
);
18849 TCGv t0
= tcg_temp_new();
18850 TCGv t1
= tcg_temp_new();
18851 TCGv t2
= tcg_temp_new();
18853 gen_load_gpr(t1
, rs
);
18854 gen_load_gpr(t2
, rt
);
18855 tcg_gen_add_tl(t0
, t1
, t2
);
18856 tcg_gen_ext32s_tl(t0
, t0
);
18857 tcg_gen_xor_tl(t1
, t1
, t2
);
18858 tcg_gen_xor_tl(t2
, t0
, t2
);
18859 tcg_gen_andc_tl(t1
, t2
, t1
);
18861 /* operands of same sign, result different sign */
18862 tcg_gen_setcondi_tl(TCG_COND_LT
, t0
, t1
, 0);
18863 gen_store_gpr(t0
, rd
);
18871 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rd
, rs
, rt
);
18874 gen_r6_muldiv(ctx
, R6_OPC_MUH
, rd
, rs
, rt
);
18877 gen_r6_muldiv(ctx
, R6_OPC_MULU
, rd
, rs
, rt
);
18880 gen_r6_muldiv(ctx
, R6_OPC_MUHU
, rd
, rs
, rt
);
18883 gen_r6_muldiv(ctx
, R6_OPC_DIV
, rd
, rs
, rt
);
18886 gen_r6_muldiv(ctx
, R6_OPC_MOD
, rd
, rs
, rt
);
18889 gen_r6_muldiv(ctx
, R6_OPC_DIVU
, rd
, rs
, rt
);
18892 gen_r6_muldiv(ctx
, R6_OPC_MODU
, rd
, rs
, rt
);
18894 #ifndef CONFIG_USER_ONLY
18896 check_cp0_enabled(ctx
);
18898 /* Treat as NOP. */
18901 gen_mfc0(ctx
, cpu_gpr
[rt
], rs
, extract32(ctx
->opcode
, 11, 3));
18904 check_cp0_enabled(ctx
);
18906 TCGv t0
= tcg_temp_new();
18908 gen_load_gpr(t0
, rt
);
18909 gen_mtc0(ctx
, t0
, rs
, extract32(ctx
->opcode
, 11, 3));
18913 case NM_D_E_MT_VPE
:
18915 uint8_t sc
= extract32(ctx
->opcode
, 10, 1);
18916 TCGv t0
= tcg_temp_new();
18923 gen_helper_dmt(t0
);
18924 gen_store_gpr(t0
, rt
);
18925 } else if (rs
== 0) {
18928 gen_helper_dvpe(t0
, cpu_env
);
18929 gen_store_gpr(t0
, rt
);
18931 gen_reserved_instruction(ctx
);
18938 gen_helper_emt(t0
);
18939 gen_store_gpr(t0
, rt
);
18940 } else if (rs
== 0) {
18943 gen_helper_evpe(t0
, cpu_env
);
18944 gen_store_gpr(t0
, rt
);
18946 gen_reserved_instruction(ctx
);
18957 TCGv t0
= tcg_temp_new();
18958 TCGv t1
= tcg_temp_new();
18960 gen_load_gpr(t0
, rt
);
18961 gen_load_gpr(t1
, rs
);
18962 gen_helper_fork(t0
, t1
);
18969 check_cp0_enabled(ctx
);
18971 /* Treat as NOP. */
18974 gen_mftr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18975 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18979 check_cp0_enabled(ctx
);
18980 gen_mttr(env
, ctx
, rs
, rt
, extract32(ctx
->opcode
, 10, 1),
18981 extract32(ctx
->opcode
, 11, 5), extract32(ctx
->opcode
, 3, 1));
18986 TCGv t0
= tcg_temp_new();
18988 gen_load_gpr(t0
, rs
);
18989 gen_helper_yield(t0
, cpu_env
, t0
);
18990 gen_store_gpr(t0
, rt
);
18996 gen_reserved_instruction(ctx
);
19002 static void gen_pool32axf_1_5_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19003 int ret
, int v1
, int v2
)
19009 t0
= tcg_temp_new_i32();
19011 v0_t
= tcg_temp_new();
19012 v1_t
= tcg_temp_new();
19014 tcg_gen_movi_i32(t0
, v2
>> 3);
19016 gen_load_gpr(v0_t
, ret
);
19017 gen_load_gpr(v1_t
, v1
);
19020 case NM_MAQ_S_W_PHR
:
19022 gen_helper_maq_s_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
19024 case NM_MAQ_S_W_PHL
:
19026 gen_helper_maq_s_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
19028 case NM_MAQ_SA_W_PHR
:
19030 gen_helper_maq_sa_w_phr(t0
, v1_t
, v0_t
, cpu_env
);
19032 case NM_MAQ_SA_W_PHL
:
19034 gen_helper_maq_sa_w_phl(t0
, v1_t
, v0_t
, cpu_env
);
19037 gen_reserved_instruction(ctx
);
19041 tcg_temp_free_i32(t0
);
19043 tcg_temp_free(v0_t
);
19044 tcg_temp_free(v1_t
);
19048 static void gen_pool32axf_1_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19049 int ret
, int v1
, int v2
)
19052 TCGv t0
= tcg_temp_new();
19053 TCGv t1
= tcg_temp_new();
19054 TCGv v0_t
= tcg_temp_new();
19056 gen_load_gpr(v0_t
, v1
);
19059 case NM_POOL32AXF_1_0
:
19061 switch (extract32(ctx
->opcode
, 12, 2)) {
19063 gen_HILO(ctx
, OPC_MFHI
, v2
>> 3, ret
);
19066 gen_HILO(ctx
, OPC_MFLO
, v2
>> 3, ret
);
19069 gen_HILO(ctx
, OPC_MTHI
, v2
>> 3, v1
);
19072 gen_HILO(ctx
, OPC_MTLO
, v2
>> 3, v1
);
19076 case NM_POOL32AXF_1_1
:
19078 switch (extract32(ctx
->opcode
, 12, 2)) {
19080 tcg_gen_movi_tl(t0
, v2
);
19081 gen_helper_mthlip(t0
, v0_t
, cpu_env
);
19084 tcg_gen_movi_tl(t0
, v2
>> 3);
19085 gen_helper_shilo(t0
, v0_t
, cpu_env
);
19088 gen_reserved_instruction(ctx
);
19092 case NM_POOL32AXF_1_3
:
19094 imm
= extract32(ctx
->opcode
, 14, 7);
19095 switch (extract32(ctx
->opcode
, 12, 2)) {
19097 tcg_gen_movi_tl(t0
, imm
);
19098 gen_helper_rddsp(t0
, t0
, cpu_env
);
19099 gen_store_gpr(t0
, ret
);
19102 gen_load_gpr(t0
, ret
);
19103 tcg_gen_movi_tl(t1
, imm
);
19104 gen_helper_wrdsp(t0
, t1
, cpu_env
);
19107 tcg_gen_movi_tl(t0
, v2
>> 3);
19108 tcg_gen_movi_tl(t1
, v1
);
19109 gen_helper_extp(t0
, t0
, t1
, cpu_env
);
19110 gen_store_gpr(t0
, ret
);
19113 tcg_gen_movi_tl(t0
, v2
>> 3);
19114 tcg_gen_movi_tl(t1
, v1
);
19115 gen_helper_extpdp(t0
, t0
, t1
, cpu_env
);
19116 gen_store_gpr(t0
, ret
);
19120 case NM_POOL32AXF_1_4
:
19122 tcg_gen_movi_tl(t0
, v2
>> 2);
19123 switch (extract32(ctx
->opcode
, 12, 1)) {
19125 gen_helper_shll_qb(t0
, t0
, v0_t
, cpu_env
);
19126 gen_store_gpr(t0
, ret
);
19129 gen_helper_shrl_qb(t0
, t0
, v0_t
);
19130 gen_store_gpr(t0
, ret
);
19134 case NM_POOL32AXF_1_5
:
19135 opc
= extract32(ctx
->opcode
, 12, 2);
19136 gen_pool32axf_1_5_nanomips_insn(ctx
, opc
, ret
, v1
, v2
);
19138 case NM_POOL32AXF_1_7
:
19140 tcg_gen_movi_tl(t0
, v2
>> 3);
19141 tcg_gen_movi_tl(t1
, v1
);
19142 switch (extract32(ctx
->opcode
, 12, 2)) {
19144 gen_helper_extr_w(t0
, t0
, t1
, cpu_env
);
19145 gen_store_gpr(t0
, ret
);
19148 gen_helper_extr_r_w(t0
, t0
, t1
, cpu_env
);
19149 gen_store_gpr(t0
, ret
);
19152 gen_helper_extr_rs_w(t0
, t0
, t1
, cpu_env
);
19153 gen_store_gpr(t0
, ret
);
19156 gen_helper_extr_s_h(t0
, t0
, t1
, cpu_env
);
19157 gen_store_gpr(t0
, ret
);
19162 gen_reserved_instruction(ctx
);
19168 tcg_temp_free(v0_t
);
19171 static void gen_pool32axf_2_multiply(DisasContext
*ctx
, uint32_t opc
,
19172 TCGv v0
, TCGv v1
, int rd
)
19176 t0
= tcg_temp_new_i32();
19178 tcg_gen_movi_i32(t0
, rd
>> 3);
19181 case NM_POOL32AXF_2_0_7
:
19182 switch (extract32(ctx
->opcode
, 9, 3)) {
19185 gen_helper_dpa_w_ph(t0
, v1
, v0
, cpu_env
);
19187 case NM_DPAQ_S_W_PH
:
19189 gen_helper_dpaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
19193 gen_helper_dps_w_ph(t0
, v1
, v0
, cpu_env
);
19195 case NM_DPSQ_S_W_PH
:
19197 gen_helper_dpsq_s_w_ph(t0
, v1
, v0
, cpu_env
);
19200 gen_reserved_instruction(ctx
);
19204 case NM_POOL32AXF_2_8_15
:
19205 switch (extract32(ctx
->opcode
, 9, 3)) {
19208 gen_helper_dpax_w_ph(t0
, v0
, v1
, cpu_env
);
19210 case NM_DPAQ_SA_L_W
:
19212 gen_helper_dpaq_sa_l_w(t0
, v0
, v1
, cpu_env
);
19216 gen_helper_dpsx_w_ph(t0
, v0
, v1
, cpu_env
);
19218 case NM_DPSQ_SA_L_W
:
19220 gen_helper_dpsq_sa_l_w(t0
, v0
, v1
, cpu_env
);
19223 gen_reserved_instruction(ctx
);
19227 case NM_POOL32AXF_2_16_23
:
19228 switch (extract32(ctx
->opcode
, 9, 3)) {
19229 case NM_DPAU_H_QBL
:
19231 gen_helper_dpau_h_qbl(t0
, v0
, v1
, cpu_env
);
19233 case NM_DPAQX_S_W_PH
:
19235 gen_helper_dpaqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
19237 case NM_DPSU_H_QBL
:
19239 gen_helper_dpsu_h_qbl(t0
, v0
, v1
, cpu_env
);
19241 case NM_DPSQX_S_W_PH
:
19243 gen_helper_dpsqx_s_w_ph(t0
, v0
, v1
, cpu_env
);
19245 case NM_MULSA_W_PH
:
19247 gen_helper_mulsa_w_ph(t0
, v0
, v1
, cpu_env
);
19250 gen_reserved_instruction(ctx
);
19254 case NM_POOL32AXF_2_24_31
:
19255 switch (extract32(ctx
->opcode
, 9, 3)) {
19256 case NM_DPAU_H_QBR
:
19258 gen_helper_dpau_h_qbr(t0
, v1
, v0
, cpu_env
);
19260 case NM_DPAQX_SA_W_PH
:
19262 gen_helper_dpaqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
19264 case NM_DPSU_H_QBR
:
19266 gen_helper_dpsu_h_qbr(t0
, v1
, v0
, cpu_env
);
19268 case NM_DPSQX_SA_W_PH
:
19270 gen_helper_dpsqx_sa_w_ph(t0
, v1
, v0
, cpu_env
);
19272 case NM_MULSAQ_S_W_PH
:
19274 gen_helper_mulsaq_s_w_ph(t0
, v1
, v0
, cpu_env
);
19277 gen_reserved_instruction(ctx
);
19282 gen_reserved_instruction(ctx
);
19286 tcg_temp_free_i32(t0
);
19289 static void gen_pool32axf_2_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19290 int rt
, int rs
, int rd
)
19293 TCGv t0
= tcg_temp_new();
19294 TCGv t1
= tcg_temp_new();
19295 TCGv v0_t
= tcg_temp_new();
19296 TCGv v1_t
= tcg_temp_new();
19298 gen_load_gpr(v0_t
, rt
);
19299 gen_load_gpr(v1_t
, rs
);
19302 case NM_POOL32AXF_2_0_7
:
19303 switch (extract32(ctx
->opcode
, 9, 3)) {
19305 case NM_DPAQ_S_W_PH
:
19307 case NM_DPSQ_S_W_PH
:
19308 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19313 gen_load_gpr(t0
, rs
);
19315 if (rd
!= 0 && rd
!= 2) {
19316 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 8 * rd
);
19317 tcg_gen_ext32u_tl(t0
, t0
);
19318 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - rd
));
19319 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
19321 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
19327 int acc
= extract32(ctx
->opcode
, 14, 2);
19328 TCGv_i64 t2
= tcg_temp_new_i64();
19329 TCGv_i64 t3
= tcg_temp_new_i64();
19331 gen_load_gpr(t0
, rt
);
19332 gen_load_gpr(t1
, rs
);
19333 tcg_gen_ext_tl_i64(t2
, t0
);
19334 tcg_gen_ext_tl_i64(t3
, t1
);
19335 tcg_gen_mul_i64(t2
, t2
, t3
);
19336 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19337 tcg_gen_add_i64(t2
, t2
, t3
);
19338 tcg_temp_free_i64(t3
);
19339 gen_move_low32(cpu_LO
[acc
], t2
);
19340 gen_move_high32(cpu_HI
[acc
], t2
);
19341 tcg_temp_free_i64(t2
);
19347 int acc
= extract32(ctx
->opcode
, 14, 2);
19348 TCGv_i32 t2
= tcg_temp_new_i32();
19349 TCGv_i32 t3
= tcg_temp_new_i32();
19351 gen_load_gpr(t0
, rs
);
19352 gen_load_gpr(t1
, rt
);
19353 tcg_gen_trunc_tl_i32(t2
, t0
);
19354 tcg_gen_trunc_tl_i32(t3
, t1
);
19355 tcg_gen_muls2_i32(t2
, t3
, t2
, t3
);
19356 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
19357 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
19358 tcg_temp_free_i32(t2
);
19359 tcg_temp_free_i32(t3
);
19364 gen_load_gpr(v1_t
, rs
);
19365 tcg_gen_movi_tl(t0
, rd
>> 3);
19366 gen_helper_extr_w(t0
, t0
, v1_t
, cpu_env
);
19367 gen_store_gpr(t0
, ret
);
19371 case NM_POOL32AXF_2_8_15
:
19372 switch (extract32(ctx
->opcode
, 9, 3)) {
19374 case NM_DPAQ_SA_L_W
:
19376 case NM_DPSQ_SA_L_W
:
19377 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19382 int acc
= extract32(ctx
->opcode
, 14, 2);
19383 TCGv_i64 t2
= tcg_temp_new_i64();
19384 TCGv_i64 t3
= tcg_temp_new_i64();
19386 gen_load_gpr(t0
, rs
);
19387 gen_load_gpr(t1
, rt
);
19388 tcg_gen_ext32u_tl(t0
, t0
);
19389 tcg_gen_ext32u_tl(t1
, t1
);
19390 tcg_gen_extu_tl_i64(t2
, t0
);
19391 tcg_gen_extu_tl_i64(t3
, t1
);
19392 tcg_gen_mul_i64(t2
, t2
, t3
);
19393 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19394 tcg_gen_add_i64(t2
, t2
, t3
);
19395 tcg_temp_free_i64(t3
);
19396 gen_move_low32(cpu_LO
[acc
], t2
);
19397 gen_move_high32(cpu_HI
[acc
], t2
);
19398 tcg_temp_free_i64(t2
);
19404 int acc
= extract32(ctx
->opcode
, 14, 2);
19405 TCGv_i32 t2
= tcg_temp_new_i32();
19406 TCGv_i32 t3
= tcg_temp_new_i32();
19408 gen_load_gpr(t0
, rs
);
19409 gen_load_gpr(t1
, rt
);
19410 tcg_gen_trunc_tl_i32(t2
, t0
);
19411 tcg_gen_trunc_tl_i32(t3
, t1
);
19412 tcg_gen_mulu2_i32(t2
, t3
, t2
, t3
);
19413 tcg_gen_ext_i32_tl(cpu_LO
[acc
], t2
);
19414 tcg_gen_ext_i32_tl(cpu_HI
[acc
], t3
);
19415 tcg_temp_free_i32(t2
);
19416 tcg_temp_free_i32(t3
);
19421 tcg_gen_movi_tl(t0
, rd
>> 3);
19422 gen_helper_extr_r_w(t0
, t0
, v1_t
, cpu_env
);
19423 gen_store_gpr(t0
, ret
);
19426 gen_reserved_instruction(ctx
);
19430 case NM_POOL32AXF_2_16_23
:
19431 switch (extract32(ctx
->opcode
, 9, 3)) {
19432 case NM_DPAU_H_QBL
:
19433 case NM_DPAQX_S_W_PH
:
19434 case NM_DPSU_H_QBL
:
19435 case NM_DPSQX_S_W_PH
:
19436 case NM_MULSA_W_PH
:
19437 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19441 tcg_gen_movi_tl(t0
, rd
>> 3);
19442 gen_helper_extp(t0
, t0
, v1_t
, cpu_env
);
19443 gen_store_gpr(t0
, ret
);
19448 int acc
= extract32(ctx
->opcode
, 14, 2);
19449 TCGv_i64 t2
= tcg_temp_new_i64();
19450 TCGv_i64 t3
= tcg_temp_new_i64();
19452 gen_load_gpr(t0
, rs
);
19453 gen_load_gpr(t1
, rt
);
19454 tcg_gen_ext_tl_i64(t2
, t0
);
19455 tcg_gen_ext_tl_i64(t3
, t1
);
19456 tcg_gen_mul_i64(t2
, t2
, t3
);
19457 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19458 tcg_gen_sub_i64(t2
, t3
, t2
);
19459 tcg_temp_free_i64(t3
);
19460 gen_move_low32(cpu_LO
[acc
], t2
);
19461 gen_move_high32(cpu_HI
[acc
], t2
);
19462 tcg_temp_free_i64(t2
);
19465 case NM_EXTRV_RS_W
:
19467 tcg_gen_movi_tl(t0
, rd
>> 3);
19468 gen_helper_extr_rs_w(t0
, t0
, v1_t
, cpu_env
);
19469 gen_store_gpr(t0
, ret
);
19473 case NM_POOL32AXF_2_24_31
:
19474 switch (extract32(ctx
->opcode
, 9, 3)) {
19475 case NM_DPAU_H_QBR
:
19476 case NM_DPAQX_SA_W_PH
:
19477 case NM_DPSU_H_QBR
:
19478 case NM_DPSQX_SA_W_PH
:
19479 case NM_MULSAQ_S_W_PH
:
19480 gen_pool32axf_2_multiply(ctx
, opc
, v0_t
, v1_t
, rd
);
19484 tcg_gen_movi_tl(t0
, rd
>> 3);
19485 gen_helper_extpdp(t0
, t0
, v1_t
, cpu_env
);
19486 gen_store_gpr(t0
, ret
);
19491 int acc
= extract32(ctx
->opcode
, 14, 2);
19492 TCGv_i64 t2
= tcg_temp_new_i64();
19493 TCGv_i64 t3
= tcg_temp_new_i64();
19495 gen_load_gpr(t0
, rs
);
19496 gen_load_gpr(t1
, rt
);
19497 tcg_gen_ext32u_tl(t0
, t0
);
19498 tcg_gen_ext32u_tl(t1
, t1
);
19499 tcg_gen_extu_tl_i64(t2
, t0
);
19500 tcg_gen_extu_tl_i64(t3
, t1
);
19501 tcg_gen_mul_i64(t2
, t2
, t3
);
19502 tcg_gen_concat_tl_i64(t3
, cpu_LO
[acc
], cpu_HI
[acc
]);
19503 tcg_gen_sub_i64(t2
, t3
, t2
);
19504 tcg_temp_free_i64(t3
);
19505 gen_move_low32(cpu_LO
[acc
], t2
);
19506 gen_move_high32(cpu_HI
[acc
], t2
);
19507 tcg_temp_free_i64(t2
);
19512 tcg_gen_movi_tl(t0
, rd
>> 3);
19513 gen_helper_extr_s_h(t0
, t0
, v0_t
, cpu_env
);
19514 gen_store_gpr(t0
, ret
);
19519 gen_reserved_instruction(ctx
);
19526 tcg_temp_free(v0_t
);
19527 tcg_temp_free(v1_t
);
19530 static void gen_pool32axf_4_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19534 TCGv t0
= tcg_temp_new();
19535 TCGv v0_t
= tcg_temp_new();
19537 gen_load_gpr(v0_t
, rs
);
19542 gen_helper_absq_s_qb(v0_t
, v0_t
, cpu_env
);
19543 gen_store_gpr(v0_t
, ret
);
19547 gen_helper_absq_s_ph(v0_t
, v0_t
, cpu_env
);
19548 gen_store_gpr(v0_t
, ret
);
19552 gen_helper_absq_s_w(v0_t
, v0_t
, cpu_env
);
19553 gen_store_gpr(v0_t
, ret
);
19555 case NM_PRECEQ_W_PHL
:
19557 tcg_gen_andi_tl(v0_t
, v0_t
, 0xFFFF0000);
19558 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19559 gen_store_gpr(v0_t
, ret
);
19561 case NM_PRECEQ_W_PHR
:
19563 tcg_gen_andi_tl(v0_t
, v0_t
, 0x0000FFFF);
19564 tcg_gen_shli_tl(v0_t
, v0_t
, 16);
19565 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19566 gen_store_gpr(v0_t
, ret
);
19568 case NM_PRECEQU_PH_QBL
:
19570 gen_helper_precequ_ph_qbl(v0_t
, v0_t
);
19571 gen_store_gpr(v0_t
, ret
);
19573 case NM_PRECEQU_PH_QBR
:
19575 gen_helper_precequ_ph_qbr(v0_t
, v0_t
);
19576 gen_store_gpr(v0_t
, ret
);
19578 case NM_PRECEQU_PH_QBLA
:
19580 gen_helper_precequ_ph_qbla(v0_t
, v0_t
);
19581 gen_store_gpr(v0_t
, ret
);
19583 case NM_PRECEQU_PH_QBRA
:
19585 gen_helper_precequ_ph_qbra(v0_t
, v0_t
);
19586 gen_store_gpr(v0_t
, ret
);
19588 case NM_PRECEU_PH_QBL
:
19590 gen_helper_preceu_ph_qbl(v0_t
, v0_t
);
19591 gen_store_gpr(v0_t
, ret
);
19593 case NM_PRECEU_PH_QBR
:
19595 gen_helper_preceu_ph_qbr(v0_t
, v0_t
);
19596 gen_store_gpr(v0_t
, ret
);
19598 case NM_PRECEU_PH_QBLA
:
19600 gen_helper_preceu_ph_qbla(v0_t
, v0_t
);
19601 gen_store_gpr(v0_t
, ret
);
19603 case NM_PRECEU_PH_QBRA
:
19605 gen_helper_preceu_ph_qbra(v0_t
, v0_t
);
19606 gen_store_gpr(v0_t
, ret
);
19610 tcg_gen_ext16u_tl(v0_t
, v0_t
);
19611 tcg_gen_shli_tl(t0
, v0_t
, 16);
19612 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19613 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19614 gen_store_gpr(v0_t
, ret
);
19618 tcg_gen_ext8u_tl(v0_t
, v0_t
);
19619 tcg_gen_shli_tl(t0
, v0_t
, 8);
19620 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19621 tcg_gen_shli_tl(t0
, v0_t
, 16);
19622 tcg_gen_or_tl(v0_t
, v0_t
, t0
);
19623 tcg_gen_ext32s_tl(v0_t
, v0_t
);
19624 gen_store_gpr(v0_t
, ret
);
19628 gen_helper_bitrev(v0_t
, v0_t
);
19629 gen_store_gpr(v0_t
, ret
);
19634 TCGv tv0
= tcg_temp_new();
19636 gen_load_gpr(tv0
, rt
);
19637 gen_helper_insv(v0_t
, cpu_env
, v0_t
, tv0
);
19638 gen_store_gpr(v0_t
, ret
);
19639 tcg_temp_free(tv0
);
19642 case NM_RADDU_W_QB
:
19644 gen_helper_raddu_w_qb(v0_t
, v0_t
);
19645 gen_store_gpr(v0_t
, ret
);
19648 gen_bitswap(ctx
, OPC_BITSWAP
, ret
, rs
);
19652 gen_cl(ctx
, OPC_CLO
, ret
, rs
);
19656 gen_cl(ctx
, OPC_CLZ
, ret
, rs
);
19659 gen_bshfl(ctx
, OPC_WSBH
, ret
, rs
);
19662 gen_reserved_instruction(ctx
);
19666 tcg_temp_free(v0_t
);
19670 static void gen_pool32axf_7_nanomips_insn(DisasContext
*ctx
, uint32_t opc
,
19671 int rt
, int rs
, int rd
)
19673 TCGv t0
= tcg_temp_new();
19674 TCGv rs_t
= tcg_temp_new();
19676 gen_load_gpr(rs_t
, rs
);
19681 tcg_gen_movi_tl(t0
, rd
>> 2);
19682 switch (extract32(ctx
->opcode
, 12, 1)) {
19685 gen_helper_shra_qb(t0
, t0
, rs_t
);
19686 gen_store_gpr(t0
, rt
);
19690 gen_helper_shra_r_qb(t0
, t0
, rs_t
);
19691 gen_store_gpr(t0
, rt
);
19697 tcg_gen_movi_tl(t0
, rd
>> 1);
19698 gen_helper_shrl_ph(t0
, t0
, rs_t
);
19699 gen_store_gpr(t0
, rt
);
19705 target_long result
;
19706 imm
= extract32(ctx
->opcode
, 13, 8);
19707 result
= (uint32_t)imm
<< 24 |
19708 (uint32_t)imm
<< 16 |
19709 (uint32_t)imm
<< 8 |
19711 result
= (int32_t)result
;
19712 tcg_gen_movi_tl(t0
, result
);
19713 gen_store_gpr(t0
, rt
);
19717 gen_reserved_instruction(ctx
);
19721 tcg_temp_free(rs_t
);
19725 static void gen_pool32axf_nanomips_insn(CPUMIPSState
*env
, DisasContext
*ctx
)
19727 int rt
= extract32(ctx
->opcode
, 21, 5);
19728 int rs
= extract32(ctx
->opcode
, 16, 5);
19729 int rd
= extract32(ctx
->opcode
, 11, 5);
19731 switch (extract32(ctx
->opcode
, 6, 3)) {
19732 case NM_POOL32AXF_1
:
19734 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19735 gen_pool32axf_1_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19738 case NM_POOL32AXF_2
:
19740 int32_t op1
= extract32(ctx
->opcode
, 12, 2);
19741 gen_pool32axf_2_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19744 case NM_POOL32AXF_4
:
19746 int32_t op1
= extract32(ctx
->opcode
, 9, 7);
19747 gen_pool32axf_4_nanomips_insn(ctx
, op1
, rt
, rs
);
19750 case NM_POOL32AXF_5
:
19751 switch (extract32(ctx
->opcode
, 9, 7)) {
19752 #ifndef CONFIG_USER_ONLY
19754 gen_cp0(env
, ctx
, OPC_TLBP
, 0, 0);
19757 gen_cp0(env
, ctx
, OPC_TLBR
, 0, 0);
19760 gen_cp0(env
, ctx
, OPC_TLBWI
, 0, 0);
19763 gen_cp0(env
, ctx
, OPC_TLBWR
, 0, 0);
19766 gen_cp0(env
, ctx
, OPC_TLBINV
, 0, 0);
19769 gen_cp0(env
, ctx
, OPC_TLBINVF
, 0, 0);
19772 check_cp0_enabled(ctx
);
19774 TCGv t0
= tcg_temp_new();
19776 save_cpu_state(ctx
, 1);
19777 gen_helper_di(t0
, cpu_env
);
19778 gen_store_gpr(t0
, rt
);
19779 /* Stop translation as we may have switched the execution mode */
19780 ctx
->base
.is_jmp
= DISAS_STOP
;
19785 check_cp0_enabled(ctx
);
19787 TCGv t0
= tcg_temp_new();
19789 save_cpu_state(ctx
, 1);
19790 gen_helper_ei(t0
, cpu_env
);
19791 gen_store_gpr(t0
, rt
);
19792 /* Stop translation as we may have switched the execution mode */
19793 ctx
->base
.is_jmp
= DISAS_STOP
;
19798 gen_load_srsgpr(rs
, rt
);
19801 gen_store_srsgpr(rs
, rt
);
19804 gen_cp0(env
, ctx
, OPC_WAIT
, 0, 0);
19807 gen_cp0(env
, ctx
, OPC_DERET
, 0, 0);
19810 gen_cp0(env
, ctx
, OPC_ERET
, 0, 0);
19814 gen_reserved_instruction(ctx
);
19818 case NM_POOL32AXF_7
:
19820 int32_t op1
= extract32(ctx
->opcode
, 9, 3);
19821 gen_pool32axf_7_nanomips_insn(ctx
, op1
, rt
, rs
, rd
);
19825 gen_reserved_instruction(ctx
);
19830 /* Immediate Value Compact Branches */
19831 static void gen_compute_imm_branch(DisasContext
*ctx
, uint32_t opc
,
19832 int rt
, int32_t imm
, int32_t offset
)
19834 TCGCond cond
= TCG_COND_ALWAYS
;
19835 TCGv t0
= tcg_temp_new();
19836 TCGv t1
= tcg_temp_new();
19838 gen_load_gpr(t0
, rt
);
19839 tcg_gen_movi_tl(t1
, imm
);
19840 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19842 /* Load needed operands and calculate btarget */
19845 if (rt
== 0 && imm
== 0) {
19846 /* Unconditional branch */
19847 } else if (rt
== 0 && imm
!= 0) {
19851 cond
= TCG_COND_EQ
;
19857 if (imm
>= 32 && !(ctx
->hflags
& MIPS_HFLAG_64
)) {
19858 gen_reserved_instruction(ctx
);
19860 } else if (rt
== 0 && opc
== NM_BBEQZC
) {
19861 /* Unconditional branch */
19862 } else if (rt
== 0 && opc
== NM_BBNEZC
) {
19866 tcg_gen_shri_tl(t0
, t0
, imm
);
19867 tcg_gen_andi_tl(t0
, t0
, 1);
19868 tcg_gen_movi_tl(t1
, 0);
19869 if (opc
== NM_BBEQZC
) {
19870 cond
= TCG_COND_EQ
;
19872 cond
= TCG_COND_NE
;
19877 if (rt
== 0 && imm
== 0) {
19880 } else if (rt
== 0 && imm
!= 0) {
19881 /* Unconditional branch */
19883 cond
= TCG_COND_NE
;
19887 if (rt
== 0 && imm
== 0) {
19888 /* Unconditional branch */
19890 cond
= TCG_COND_GE
;
19894 cond
= TCG_COND_LT
;
19897 if (rt
== 0 && imm
== 0) {
19898 /* Unconditional branch */
19900 cond
= TCG_COND_GEU
;
19904 cond
= TCG_COND_LTU
;
19907 MIPS_INVAL("Immediate Value Compact branch");
19908 gen_reserved_instruction(ctx
);
19912 /* branch completion */
19913 clear_branch_hflags(ctx
);
19914 ctx
->base
.is_jmp
= DISAS_NORETURN
;
19916 if (cond
== TCG_COND_ALWAYS
) {
19917 /* Uncoditional compact branch */
19918 gen_goto_tb(ctx
, 0, ctx
->btarget
);
19920 /* Conditional compact branch */
19921 TCGLabel
*fs
= gen_new_label();
19923 tcg_gen_brcond_tl(tcg_invert_cond(cond
), t0
, t1
, fs
);
19925 gen_goto_tb(ctx
, 1, ctx
->btarget
);
19928 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
19936 /* P.BALRSC type nanoMIPS R6 branches: BALRSC and BRSC */
19937 static void gen_compute_nanomips_pbalrsc_branch(DisasContext
*ctx
, int rs
,
19940 TCGv t0
= tcg_temp_new();
19941 TCGv t1
= tcg_temp_new();
19944 gen_load_gpr(t0
, rs
);
19948 tcg_gen_movi_tl(cpu_gpr
[rt
], ctx
->base
.pc_next
+ 4);
19951 /* calculate btarget */
19952 tcg_gen_shli_tl(t0
, t0
, 1);
19953 tcg_gen_movi_tl(t1
, ctx
->base
.pc_next
+ 4);
19954 gen_op_addr_add(ctx
, btarget
, t1
, t0
);
19956 /* branch completion */
19957 clear_branch_hflags(ctx
);
19958 ctx
->base
.is_jmp
= DISAS_NORETURN
;
19960 /* unconditional branch to register */
19961 tcg_gen_mov_tl(cpu_PC
, btarget
);
19962 tcg_gen_lookup_and_goto_ptr();
19968 /* nanoMIPS Branches */
19969 static void gen_compute_compact_branch_nm(DisasContext
*ctx
, uint32_t opc
,
19970 int rs
, int rt
, int32_t offset
)
19972 int bcond_compute
= 0;
19973 TCGv t0
= tcg_temp_new();
19974 TCGv t1
= tcg_temp_new();
19976 /* Load needed operands and calculate btarget */
19978 /* compact branch */
19981 gen_load_gpr(t0
, rs
);
19982 gen_load_gpr(t1
, rt
);
19984 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19988 if (rs
== 0 || rs
== rt
) {
19989 /* OPC_BLEZALC, OPC_BGEZALC */
19990 /* OPC_BGTZALC, OPC_BLTZALC */
19991 tcg_gen_movi_tl(cpu_gpr
[31], ctx
->base
.pc_next
+ 4);
19993 gen_load_gpr(t0
, rs
);
19994 gen_load_gpr(t1
, rt
);
19996 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
19999 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
20003 /* OPC_BEQZC, OPC_BNEZC */
20004 gen_load_gpr(t0
, rs
);
20006 ctx
->btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
20008 /* OPC_JIC, OPC_JIALC */
20009 TCGv tbase
= tcg_temp_new();
20010 TCGv toffset
= tcg_temp_new();
20012 gen_load_gpr(tbase
, rt
);
20013 tcg_gen_movi_tl(toffset
, offset
);
20014 gen_op_addr_add(ctx
, btarget
, tbase
, toffset
);
20015 tcg_temp_free(tbase
);
20016 tcg_temp_free(toffset
);
20020 MIPS_INVAL("Compact branch/jump");
20021 gen_reserved_instruction(ctx
);
20025 if (bcond_compute
== 0) {
20026 /* Uncoditional compact branch */
20029 gen_goto_tb(ctx
, 0, ctx
->btarget
);
20032 MIPS_INVAL("Compact branch/jump");
20033 gen_reserved_instruction(ctx
);
20037 /* Conditional compact branch */
20038 TCGLabel
*fs
= gen_new_label();
20042 if (rs
== 0 && rt
!= 0) {
20044 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
20045 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
20047 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
20050 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GEU
), t0
, t1
, fs
);
20054 if (rs
== 0 && rt
!= 0) {
20056 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
20057 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
20059 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
20062 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LTU
), t0
, t1
, fs
);
20066 if (rs
== 0 && rt
!= 0) {
20068 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LE
), t1
, 0, fs
);
20069 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
20071 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GE
), t1
, 0, fs
);
20074 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_GE
), t0
, t1
, fs
);
20078 if (rs
== 0 && rt
!= 0) {
20080 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_GT
), t1
, 0, fs
);
20081 } else if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
20083 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_LT
), t1
, 0, fs
);
20086 tcg_gen_brcond_tl(tcg_invert_cond(TCG_COND_LT
), t0
, t1
, fs
);
20090 tcg_gen_brcondi_tl(tcg_invert_cond(TCG_COND_EQ
), t0
, 0, fs
);
20093 MIPS_INVAL("Compact conditional branch/jump");
20094 gen_reserved_instruction(ctx
);
20098 /* branch completion */
20099 clear_branch_hflags(ctx
);
20100 ctx
->base
.is_jmp
= DISAS_NORETURN
;
20102 /* Generating branch here as compact branches don't have delay slot */
20103 gen_goto_tb(ctx
, 1, ctx
->btarget
);
20106 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
+ 4);
20115 /* nanoMIPS CP1 Branches */
20116 static void gen_compute_branch_cp1_nm(DisasContext
*ctx
, uint32_t op
,
20117 int32_t ft
, int32_t offset
)
20119 target_ulong btarget
;
20120 TCGv_i64 t0
= tcg_temp_new_i64();
20122 gen_load_fpr64(ctx
, t0
, ft
);
20123 tcg_gen_andi_i64(t0
, t0
, 1);
20125 btarget
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
20129 tcg_gen_xori_i64(t0
, t0
, 1);
20130 ctx
->hflags
|= MIPS_HFLAG_BC
;
20133 /* t0 already set */
20134 ctx
->hflags
|= MIPS_HFLAG_BC
;
20137 MIPS_INVAL("cp1 cond branch");
20138 gen_reserved_instruction(ctx
);
20142 tcg_gen_trunc_i64_tl(bcond
, t0
);
20144 ctx
->btarget
= btarget
;
20147 tcg_temp_free_i64(t0
);
20151 static void gen_p_lsx(DisasContext
*ctx
, int rd
, int rs
, int rt
)
20154 t0
= tcg_temp_new();
20155 t1
= tcg_temp_new();
20157 gen_load_gpr(t0
, rs
);
20158 gen_load_gpr(t1
, rt
);
20160 if ((extract32(ctx
->opcode
, 6, 1)) == 1) {
20161 /* PP.LSXS instructions require shifting */
20162 switch (extract32(ctx
->opcode
, 7, 4)) {
20168 tcg_gen_shli_tl(t0
, t0
, 1);
20176 tcg_gen_shli_tl(t0
, t0
, 2);
20180 tcg_gen_shli_tl(t0
, t0
, 3);
20184 gen_op_addr_add(ctx
, t0
, t0
, t1
);
20186 switch (extract32(ctx
->opcode
, 7, 4)) {
20188 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20190 gen_store_gpr(t0
, rd
);
20194 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20196 gen_store_gpr(t0
, rd
);
20200 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20202 gen_store_gpr(t0
, rd
);
20205 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20207 gen_store_gpr(t0
, rd
);
20211 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
,
20213 gen_store_gpr(t0
, rd
);
20217 gen_load_gpr(t1
, rd
);
20218 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
20224 gen_load_gpr(t1
, rd
);
20225 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
20231 gen_load_gpr(t1
, rd
);
20232 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
,
20236 /*case NM_LWC1XS:*/
20238 /*case NM_LDC1XS:*/
20240 /*case NM_SWC1XS:*/
20242 /*case NM_SDC1XS:*/
20243 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
20244 check_cp1_enabled(ctx
);
20245 switch (extract32(ctx
->opcode
, 7, 4)) {
20247 /*case NM_LWC1XS:*/
20248 gen_flt_ldst(ctx
, OPC_LWC1
, rd
, t0
);
20251 /*case NM_LDC1XS:*/
20252 gen_flt_ldst(ctx
, OPC_LDC1
, rd
, t0
);
20255 /*case NM_SWC1XS:*/
20256 gen_flt_ldst(ctx
, OPC_SWC1
, rd
, t0
);
20259 /*case NM_SDC1XS:*/
20260 gen_flt_ldst(ctx
, OPC_SDC1
, rd
, t0
);
20264 generate_exception_err(ctx
, EXCP_CpU
, 1);
20268 gen_reserved_instruction(ctx
);
20276 static void gen_pool32f_nanomips_insn(DisasContext
*ctx
)
20280 rt
= extract32(ctx
->opcode
, 21, 5);
20281 rs
= extract32(ctx
->opcode
, 16, 5);
20282 rd
= extract32(ctx
->opcode
, 11, 5);
20284 if (!(ctx
->CP0_Config1
& (1 << CP0C1_FP
))) {
20285 gen_reserved_instruction(ctx
);
20288 check_cp1_enabled(ctx
);
20289 switch (extract32(ctx
->opcode
, 0, 3)) {
20291 switch (extract32(ctx
->opcode
, 3, 7)) {
20293 gen_farith(ctx
, OPC_RINT_S
, 0, rt
, rs
, 0);
20296 gen_farith(ctx
, OPC_RINT_D
, 0, rt
, rs
, 0);
20299 gen_farith(ctx
, OPC_CLASS_S
, 0, rt
, rs
, 0);
20302 gen_farith(ctx
, OPC_CLASS_D
, 0, rt
, rs
, 0);
20305 gen_farith(ctx
, OPC_ADD_S
, rt
, rs
, rd
, 0);
20308 gen_farith(ctx
, OPC_ADD_D
, rt
, rs
, rd
, 0);
20311 gen_farith(ctx
, OPC_SUB_S
, rt
, rs
, rd
, 0);
20314 gen_farith(ctx
, OPC_SUB_D
, rt
, rs
, rd
, 0);
20317 gen_farith(ctx
, OPC_MUL_S
, rt
, rs
, rd
, 0);
20320 gen_farith(ctx
, OPC_MUL_D
, rt
, rs
, rd
, 0);
20323 gen_farith(ctx
, OPC_DIV_S
, rt
, rs
, rd
, 0);
20326 gen_farith(ctx
, OPC_DIV_D
, rt
, rs
, rd
, 0);
20329 gen_sel_s(ctx
, OPC_SELEQZ_S
, rd
, rt
, rs
);
20332 gen_sel_d(ctx
, OPC_SELEQZ_D
, rd
, rt
, rs
);
20335 gen_sel_s(ctx
, OPC_SELNEZ_S
, rd
, rt
, rs
);
20338 gen_sel_d(ctx
, OPC_SELNEZ_D
, rd
, rt
, rs
);
20341 gen_sel_s(ctx
, OPC_SEL_S
, rd
, rt
, rs
);
20344 gen_sel_d(ctx
, OPC_SEL_D
, rd
, rt
, rs
);
20347 gen_farith(ctx
, OPC_MADDF_S
, rt
, rs
, rd
, 0);
20350 gen_farith(ctx
, OPC_MADDF_D
, rt
, rs
, rd
, 0);
20353 gen_farith(ctx
, OPC_MSUBF_S
, rt
, rs
, rd
, 0);
20356 gen_farith(ctx
, OPC_MSUBF_D
, rt
, rs
, rd
, 0);
20359 gen_reserved_instruction(ctx
);
20364 switch (extract32(ctx
->opcode
, 3, 3)) {
20366 switch (extract32(ctx
->opcode
, 9, 1)) {
20368 gen_farith(ctx
, OPC_MIN_S
, rt
, rs
, rd
, 0);
20371 gen_farith(ctx
, OPC_MIN_D
, rt
, rs
, rd
, 0);
20376 switch (extract32(ctx
->opcode
, 9, 1)) {
20378 gen_farith(ctx
, OPC_MAX_S
, rt
, rs
, rd
, 0);
20381 gen_farith(ctx
, OPC_MAX_D
, rt
, rs
, rd
, 0);
20386 switch (extract32(ctx
->opcode
, 9, 1)) {
20388 gen_farith(ctx
, OPC_MINA_S
, rt
, rs
, rd
, 0);
20391 gen_farith(ctx
, OPC_MINA_D
, rt
, rs
, rd
, 0);
20396 switch (extract32(ctx
->opcode
, 9, 1)) {
20398 gen_farith(ctx
, OPC_MAXA_S
, rt
, rs
, rd
, 0);
20401 gen_farith(ctx
, OPC_MAXA_D
, rt
, rs
, rd
, 0);
20406 switch (extract32(ctx
->opcode
, 6, 8)) {
20408 gen_cp1(ctx
, OPC_CFC1
, rt
, rs
);
20411 gen_cp1(ctx
, OPC_CTC1
, rt
, rs
);
20414 gen_cp1(ctx
, OPC_MFC1
, rt
, rs
);
20417 gen_cp1(ctx
, OPC_MTC1
, rt
, rs
);
20420 gen_cp1(ctx
, OPC_MFHC1
, rt
, rs
);
20423 gen_cp1(ctx
, OPC_MTHC1
, rt
, rs
);
20426 gen_farith(ctx
, OPC_CVT_S_PL
, -1, rs
, rt
, 0);
20429 gen_farith(ctx
, OPC_CVT_S_PU
, -1, rs
, rt
, 0);
20432 switch (extract32(ctx
->opcode
, 6, 9)) {
20434 gen_farith(ctx
, OPC_CVT_L_S
, -1, rs
, rt
, 0);
20437 gen_farith(ctx
, OPC_CVT_L_D
, -1, rs
, rt
, 0);
20440 gen_farith(ctx
, OPC_CVT_W_S
, -1, rs
, rt
, 0);
20443 gen_farith(ctx
, OPC_CVT_W_D
, -1, rs
, rt
, 0);
20446 gen_farith(ctx
, OPC_RSQRT_S
, -1, rs
, rt
, 0);
20449 gen_farith(ctx
, OPC_RSQRT_D
, -1, rs
, rt
, 0);
20452 gen_farith(ctx
, OPC_SQRT_S
, -1, rs
, rt
, 0);
20455 gen_farith(ctx
, OPC_SQRT_D
, -1, rs
, rt
, 0);
20458 gen_farith(ctx
, OPC_RECIP_S
, -1, rs
, rt
, 0);
20461 gen_farith(ctx
, OPC_RECIP_D
, -1, rs
, rt
, 0);
20464 gen_farith(ctx
, OPC_FLOOR_L_S
, -1, rs
, rt
, 0);
20467 gen_farith(ctx
, OPC_FLOOR_L_D
, -1, rs
, rt
, 0);
20470 gen_farith(ctx
, OPC_FLOOR_W_S
, -1, rs
, rt
, 0);
20473 gen_farith(ctx
, OPC_FLOOR_W_D
, -1, rs
, rt
, 0);
20476 gen_farith(ctx
, OPC_CEIL_L_S
, -1, rs
, rt
, 0);
20479 gen_farith(ctx
, OPC_CEIL_L_D
, -1, rs
, rt
, 0);
20482 gen_farith(ctx
, OPC_CEIL_W_S
, -1, rs
, rt
, 0);
20485 gen_farith(ctx
, OPC_CEIL_W_D
, -1, rs
, rt
, 0);
20488 gen_farith(ctx
, OPC_TRUNC_L_S
, -1, rs
, rt
, 0);
20491 gen_farith(ctx
, OPC_TRUNC_L_D
, -1, rs
, rt
, 0);
20494 gen_farith(ctx
, OPC_TRUNC_W_S
, -1, rs
, rt
, 0);
20497 gen_farith(ctx
, OPC_TRUNC_W_D
, -1, rs
, rt
, 0);
20500 gen_farith(ctx
, OPC_ROUND_L_S
, -1, rs
, rt
, 0);
20503 gen_farith(ctx
, OPC_ROUND_L_D
, -1, rs
, rt
, 0);
20506 gen_farith(ctx
, OPC_ROUND_W_S
, -1, rs
, rt
, 0);
20509 gen_farith(ctx
, OPC_ROUND_W_D
, -1, rs
, rt
, 0);
20512 gen_farith(ctx
, OPC_MOV_S
, -1, rs
, rt
, 0);
20515 gen_farith(ctx
, OPC_MOV_D
, -1, rs
, rt
, 0);
20518 gen_farith(ctx
, OPC_ABS_S
, -1, rs
, rt
, 0);
20521 gen_farith(ctx
, OPC_ABS_D
, -1, rs
, rt
, 0);
20524 gen_farith(ctx
, OPC_NEG_S
, -1, rs
, rt
, 0);
20527 gen_farith(ctx
, OPC_NEG_D
, -1, rs
, rt
, 0);
20530 gen_farith(ctx
, OPC_CVT_D_S
, -1, rs
, rt
, 0);
20533 gen_farith(ctx
, OPC_CVT_D_W
, -1, rs
, rt
, 0);
20536 gen_farith(ctx
, OPC_CVT_D_L
, -1, rs
, rt
, 0);
20539 gen_farith(ctx
, OPC_CVT_S_D
, -1, rs
, rt
, 0);
20542 gen_farith(ctx
, OPC_CVT_S_W
, -1, rs
, rt
, 0);
20545 gen_farith(ctx
, OPC_CVT_S_L
, -1, rs
, rt
, 0);
20548 gen_reserved_instruction(ctx
);
20557 switch (extract32(ctx
->opcode
, 3, 3)) {
20558 case NM_CMP_CONDN_S
:
20559 gen_r6_cmp_s(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20561 case NM_CMP_CONDN_D
:
20562 gen_r6_cmp_d(ctx
, extract32(ctx
->opcode
, 6, 5), rt
, rs
, rd
);
20565 gen_reserved_instruction(ctx
);
20570 gen_reserved_instruction(ctx
);
20575 static void gen_pool32a5_nanomips_insn(DisasContext
*ctx
, int opc
,
20576 int rd
, int rs
, int rt
)
20579 TCGv t0
= tcg_temp_new();
20580 TCGv v1_t
= tcg_temp_new();
20581 TCGv v2_t
= tcg_temp_new();
20583 gen_load_gpr(v1_t
, rs
);
20584 gen_load_gpr(v2_t
, rt
);
20589 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
20593 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
20597 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
20599 case NM_CMPU_EQ_QB
:
20601 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
20603 case NM_CMPU_LT_QB
:
20605 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
20607 case NM_CMPU_LE_QB
:
20609 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
20611 case NM_CMPGU_EQ_QB
:
20613 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20614 gen_store_gpr(v1_t
, ret
);
20616 case NM_CMPGU_LT_QB
:
20618 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20619 gen_store_gpr(v1_t
, ret
);
20621 case NM_CMPGU_LE_QB
:
20623 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20624 gen_store_gpr(v1_t
, ret
);
20626 case NM_CMPGDU_EQ_QB
:
20628 gen_helper_cmpgu_eq_qb(v1_t
, v1_t
, v2_t
);
20629 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20630 gen_store_gpr(v1_t
, ret
);
20632 case NM_CMPGDU_LT_QB
:
20634 gen_helper_cmpgu_lt_qb(v1_t
, v1_t
, v2_t
);
20635 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20636 gen_store_gpr(v1_t
, ret
);
20638 case NM_CMPGDU_LE_QB
:
20640 gen_helper_cmpgu_le_qb(v1_t
, v1_t
, v2_t
);
20641 tcg_gen_deposit_tl(cpu_dspctrl
, cpu_dspctrl
, v1_t
, 24, 4);
20642 gen_store_gpr(v1_t
, ret
);
20646 gen_helper_packrl_ph(v1_t
, v1_t
, v2_t
);
20647 gen_store_gpr(v1_t
, ret
);
20651 gen_helper_pick_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20652 gen_store_gpr(v1_t
, ret
);
20656 gen_helper_pick_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20657 gen_store_gpr(v1_t
, ret
);
20661 gen_helper_addq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20662 gen_store_gpr(v1_t
, ret
);
20666 gen_helper_subq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20667 gen_store_gpr(v1_t
, ret
);
20671 gen_helper_addsc(v1_t
, v1_t
, v2_t
, cpu_env
);
20672 gen_store_gpr(v1_t
, ret
);
20676 gen_helper_addwc(v1_t
, v1_t
, v2_t
, cpu_env
);
20677 gen_store_gpr(v1_t
, ret
);
20681 switch (extract32(ctx
->opcode
, 10, 1)) {
20684 gen_helper_addq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20685 gen_store_gpr(v1_t
, ret
);
20689 gen_helper_addq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20690 gen_store_gpr(v1_t
, ret
);
20694 case NM_ADDQH_R_PH
:
20696 switch (extract32(ctx
->opcode
, 10, 1)) {
20699 gen_helper_addqh_ph(v1_t
, v1_t
, v2_t
);
20700 gen_store_gpr(v1_t
, ret
);
20704 gen_helper_addqh_r_ph(v1_t
, v1_t
, v2_t
);
20705 gen_store_gpr(v1_t
, ret
);
20711 switch (extract32(ctx
->opcode
, 10, 1)) {
20714 gen_helper_addqh_w(v1_t
, v1_t
, v2_t
);
20715 gen_store_gpr(v1_t
, ret
);
20719 gen_helper_addqh_r_w(v1_t
, v1_t
, v2_t
);
20720 gen_store_gpr(v1_t
, ret
);
20726 switch (extract32(ctx
->opcode
, 10, 1)) {
20729 gen_helper_addu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20730 gen_store_gpr(v1_t
, ret
);
20734 gen_helper_addu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20735 gen_store_gpr(v1_t
, ret
);
20741 switch (extract32(ctx
->opcode
, 10, 1)) {
20744 gen_helper_addu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20745 gen_store_gpr(v1_t
, ret
);
20749 gen_helper_addu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20750 gen_store_gpr(v1_t
, ret
);
20754 case NM_ADDUH_R_QB
:
20756 switch (extract32(ctx
->opcode
, 10, 1)) {
20759 gen_helper_adduh_qb(v1_t
, v1_t
, v2_t
);
20760 gen_store_gpr(v1_t
, ret
);
20764 gen_helper_adduh_r_qb(v1_t
, v1_t
, v2_t
);
20765 gen_store_gpr(v1_t
, ret
);
20769 case NM_SHRAV_R_PH
:
20771 switch (extract32(ctx
->opcode
, 10, 1)) {
20774 gen_helper_shra_ph(v1_t
, v1_t
, v2_t
);
20775 gen_store_gpr(v1_t
, ret
);
20779 gen_helper_shra_r_ph(v1_t
, v1_t
, v2_t
);
20780 gen_store_gpr(v1_t
, ret
);
20784 case NM_SHRAV_R_QB
:
20786 switch (extract32(ctx
->opcode
, 10, 1)) {
20789 gen_helper_shra_qb(v1_t
, v1_t
, v2_t
);
20790 gen_store_gpr(v1_t
, ret
);
20794 gen_helper_shra_r_qb(v1_t
, v1_t
, v2_t
);
20795 gen_store_gpr(v1_t
, ret
);
20801 switch (extract32(ctx
->opcode
, 10, 1)) {
20804 gen_helper_subq_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20805 gen_store_gpr(v1_t
, ret
);
20809 gen_helper_subq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20810 gen_store_gpr(v1_t
, ret
);
20814 case NM_SUBQH_R_PH
:
20816 switch (extract32(ctx
->opcode
, 10, 1)) {
20819 gen_helper_subqh_ph(v1_t
, v1_t
, v2_t
);
20820 gen_store_gpr(v1_t
, ret
);
20824 gen_helper_subqh_r_ph(v1_t
, v1_t
, v2_t
);
20825 gen_store_gpr(v1_t
, ret
);
20831 switch (extract32(ctx
->opcode
, 10, 1)) {
20834 gen_helper_subqh_w(v1_t
, v1_t
, v2_t
);
20835 gen_store_gpr(v1_t
, ret
);
20839 gen_helper_subqh_r_w(v1_t
, v1_t
, v2_t
);
20840 gen_store_gpr(v1_t
, ret
);
20846 switch (extract32(ctx
->opcode
, 10, 1)) {
20849 gen_helper_subu_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20850 gen_store_gpr(v1_t
, ret
);
20854 gen_helper_subu_s_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20855 gen_store_gpr(v1_t
, ret
);
20861 switch (extract32(ctx
->opcode
, 10, 1)) {
20864 gen_helper_subu_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20865 gen_store_gpr(v1_t
, ret
);
20869 gen_helper_subu_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20870 gen_store_gpr(v1_t
, ret
);
20874 case NM_SUBUH_R_QB
:
20876 switch (extract32(ctx
->opcode
, 10, 1)) {
20879 gen_helper_subuh_qb(v1_t
, v1_t
, v2_t
);
20880 gen_store_gpr(v1_t
, ret
);
20884 gen_helper_subuh_r_qb(v1_t
, v1_t
, v2_t
);
20885 gen_store_gpr(v1_t
, ret
);
20889 case NM_SHLLV_S_PH
:
20891 switch (extract32(ctx
->opcode
, 10, 1)) {
20894 gen_helper_shll_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20895 gen_store_gpr(v1_t
, ret
);
20899 gen_helper_shll_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20900 gen_store_gpr(v1_t
, ret
);
20904 case NM_PRECR_SRA_R_PH_W
:
20906 switch (extract32(ctx
->opcode
, 10, 1)) {
20908 /* PRECR_SRA_PH_W */
20910 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20911 gen_helper_precr_sra_ph_w(v1_t
, sa_t
, v1_t
,
20913 gen_store_gpr(v1_t
, rt
);
20914 tcg_temp_free_i32(sa_t
);
20918 /* PRECR_SRA_R_PH_W */
20920 TCGv_i32 sa_t
= tcg_const_i32(rd
);
20921 gen_helper_precr_sra_r_ph_w(v1_t
, sa_t
, v1_t
,
20923 gen_store_gpr(v1_t
, rt
);
20924 tcg_temp_free_i32(sa_t
);
20929 case NM_MULEU_S_PH_QBL
:
20931 gen_helper_muleu_s_ph_qbl(v1_t
, v1_t
, v2_t
, cpu_env
);
20932 gen_store_gpr(v1_t
, ret
);
20934 case NM_MULEU_S_PH_QBR
:
20936 gen_helper_muleu_s_ph_qbr(v1_t
, v1_t
, v2_t
, cpu_env
);
20937 gen_store_gpr(v1_t
, ret
);
20939 case NM_MULQ_RS_PH
:
20941 gen_helper_mulq_rs_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20942 gen_store_gpr(v1_t
, ret
);
20946 gen_helper_mulq_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
20947 gen_store_gpr(v1_t
, ret
);
20951 gen_helper_mulq_rs_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20952 gen_store_gpr(v1_t
, ret
);
20956 gen_helper_mulq_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20957 gen_store_gpr(v1_t
, ret
);
20961 gen_load_gpr(t0
, rs
);
20963 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], rd
, 32 - rd
);
20965 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
20969 gen_helper_modsub(v1_t
, v1_t
, v2_t
);
20970 gen_store_gpr(v1_t
, ret
);
20974 gen_helper_shra_r_w(v1_t
, v1_t
, v2_t
);
20975 gen_store_gpr(v1_t
, ret
);
20979 gen_helper_shrl_ph(v1_t
, v1_t
, v2_t
);
20980 gen_store_gpr(v1_t
, ret
);
20984 gen_helper_shrl_qb(v1_t
, v1_t
, v2_t
);
20985 gen_store_gpr(v1_t
, ret
);
20989 gen_helper_shll_qb(v1_t
, v1_t
, v2_t
, cpu_env
);
20990 gen_store_gpr(v1_t
, ret
);
20994 gen_helper_shll_s_w(v1_t
, v1_t
, v2_t
, cpu_env
);
20995 gen_store_gpr(v1_t
, ret
);
21000 TCGv tv0
= tcg_temp_new();
21001 TCGv tv1
= tcg_temp_new();
21002 int16_t imm
= extract32(ctx
->opcode
, 16, 7);
21004 tcg_gen_movi_tl(tv0
, rd
>> 3);
21005 tcg_gen_movi_tl(tv1
, imm
);
21006 gen_helper_shilo(tv0
, tv1
, cpu_env
);
21009 case NM_MULEQ_S_W_PHL
:
21011 gen_helper_muleq_s_w_phl(v1_t
, v1_t
, v2_t
, cpu_env
);
21012 gen_store_gpr(v1_t
, ret
);
21014 case NM_MULEQ_S_W_PHR
:
21016 gen_helper_muleq_s_w_phr(v1_t
, v1_t
, v2_t
, cpu_env
);
21017 gen_store_gpr(v1_t
, ret
);
21021 switch (extract32(ctx
->opcode
, 10, 1)) {
21024 gen_helper_mul_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
21025 gen_store_gpr(v1_t
, ret
);
21029 gen_helper_mul_s_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
21030 gen_store_gpr(v1_t
, ret
);
21034 case NM_PRECR_QB_PH
:
21036 gen_helper_precr_qb_ph(v1_t
, v1_t
, v2_t
);
21037 gen_store_gpr(v1_t
, ret
);
21039 case NM_PRECRQ_QB_PH
:
21041 gen_helper_precrq_qb_ph(v1_t
, v1_t
, v2_t
);
21042 gen_store_gpr(v1_t
, ret
);
21044 case NM_PRECRQ_PH_W
:
21046 gen_helper_precrq_ph_w(v1_t
, v1_t
, v2_t
);
21047 gen_store_gpr(v1_t
, ret
);
21049 case NM_PRECRQ_RS_PH_W
:
21051 gen_helper_precrq_rs_ph_w(v1_t
, v1_t
, v2_t
, cpu_env
);
21052 gen_store_gpr(v1_t
, ret
);
21054 case NM_PRECRQU_S_QB_PH
:
21056 gen_helper_precrqu_s_qb_ph(v1_t
, v1_t
, v2_t
, cpu_env
);
21057 gen_store_gpr(v1_t
, ret
);
21061 tcg_gen_movi_tl(t0
, rd
);
21062 gen_helper_shra_r_w(v1_t
, t0
, v1_t
);
21063 gen_store_gpr(v1_t
, rt
);
21067 tcg_gen_movi_tl(t0
, rd
>> 1);
21068 switch (extract32(ctx
->opcode
, 10, 1)) {
21071 gen_helper_shra_ph(v1_t
, t0
, v1_t
);
21072 gen_store_gpr(v1_t
, rt
);
21076 gen_helper_shra_r_ph(v1_t
, t0
, v1_t
);
21077 gen_store_gpr(v1_t
, rt
);
21083 tcg_gen_movi_tl(t0
, rd
>> 1);
21084 switch (extract32(ctx
->opcode
, 10, 2)) {
21087 gen_helper_shll_ph(v1_t
, t0
, v1_t
, cpu_env
);
21088 gen_store_gpr(v1_t
, rt
);
21092 gen_helper_shll_s_ph(v1_t
, t0
, v1_t
, cpu_env
);
21093 gen_store_gpr(v1_t
, rt
);
21096 gen_reserved_instruction(ctx
);
21102 tcg_gen_movi_tl(t0
, rd
);
21103 gen_helper_shll_s_w(v1_t
, t0
, v1_t
, cpu_env
);
21104 gen_store_gpr(v1_t
, rt
);
21110 imm
= sextract32(ctx
->opcode
, 11, 11);
21111 imm
= (int16_t)(imm
<< 6) >> 6;
21113 tcg_gen_movi_tl(cpu_gpr
[rt
], dup_const(MO_16
, imm
));
21118 gen_reserved_instruction(ctx
);
21123 static int decode_nanomips_32_48_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
21131 insn
= translator_lduw(env
, ctx
->base
.pc_next
+ 2);
21132 ctx
->opcode
= (ctx
->opcode
<< 16) | insn
;
21134 rt
= extract32(ctx
->opcode
, 21, 5);
21135 rs
= extract32(ctx
->opcode
, 16, 5);
21136 rd
= extract32(ctx
->opcode
, 11, 5);
21138 op
= extract32(ctx
->opcode
, 26, 6);
21143 switch (extract32(ctx
->opcode
, 19, 2)) {
21146 gen_reserved_instruction(ctx
);
21149 if ((extract32(ctx
->opcode
, 18, 1)) == NM_SYSCALL
) {
21150 generate_exception_end(ctx
, EXCP_SYSCALL
);
21152 gen_reserved_instruction(ctx
);
21156 generate_exception_end(ctx
, EXCP_BREAK
);
21159 if (is_uhi(extract32(ctx
->opcode
, 0, 19))) {
21160 gen_helper_do_semihosting(cpu_env
);
21162 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
21163 gen_reserved_instruction(ctx
);
21165 generate_exception_end(ctx
, EXCP_DBp
);
21172 imm
= extract32(ctx
->opcode
, 0, 16);
21174 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rs
], imm
);
21176 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
21178 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
21183 offset
= sextract32(ctx
->opcode
, 0, 1) << 21 |
21184 extract32(ctx
->opcode
, 1, 20) << 1;
21185 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
21186 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21190 switch (ctx
->opcode
& 0x07) {
21192 gen_pool32a0_nanomips_insn(env
, ctx
);
21196 int32_t op1
= extract32(ctx
->opcode
, 3, 7);
21197 gen_pool32a5_nanomips_insn(ctx
, op1
, rd
, rs
, rt
);
21201 switch (extract32(ctx
->opcode
, 3, 3)) {
21203 gen_p_lsx(ctx
, rd
, rs
, rt
);
21207 * In nanoMIPS, the shift field directly encodes the shift
21208 * amount, meaning that the supported shift values are in
21209 * the range 0 to 3 (instead of 1 to 4 in MIPSR6).
21211 gen_lsa(ctx
, rd
, rt
, rs
, extract32(ctx
->opcode
, 9, 2) - 1);
21214 gen_ext(ctx
, 32, rd
, rs
, rt
, extract32(ctx
->opcode
, 6, 5));
21217 gen_pool32axf_nanomips_insn(env
, ctx
);
21220 gen_reserved_instruction(ctx
);
21225 gen_reserved_instruction(ctx
);
21230 switch (ctx
->opcode
& 0x03) {
21233 offset
= extract32(ctx
->opcode
, 0, 21);
21234 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], offset
);
21238 gen_ld(ctx
, OPC_LW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
21241 gen_st(ctx
, OPC_SW
, rt
, 28, extract32(ctx
->opcode
, 2, 19) << 2);
21244 gen_reserved_instruction(ctx
);
21250 insn
= translator_lduw(env
, ctx
->base
.pc_next
+ 4);
21251 target_long addr_off
= extract32(ctx
->opcode
, 0, 16) | insn
<< 16;
21252 switch (extract32(ctx
->opcode
, 16, 5)) {
21256 tcg_gen_movi_tl(cpu_gpr
[rt
], addr_off
);
21262 tcg_gen_addi_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], addr_off
);
21263 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
21269 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], addr_off
);
21275 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21278 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21285 t0
= tcg_temp_new();
21287 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21290 tcg_gen_movi_tl(t0
, addr
);
21291 tcg_gen_qemu_ld_tl(cpu_gpr
[rt
], t0
, ctx
->mem_idx
, MO_TESL
);
21299 t0
= tcg_temp_new();
21300 t1
= tcg_temp_new();
21302 target_long addr
= addr_add(ctx
, ctx
->base
.pc_next
+ 6,
21305 tcg_gen_movi_tl(t0
, addr
);
21306 gen_load_gpr(t1
, rt
);
21308 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUL
);
21315 gen_reserved_instruction(ctx
);
21321 switch (extract32(ctx
->opcode
, 12, 4)) {
21323 gen_logic_imm(ctx
, OPC_ORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21326 gen_logic_imm(ctx
, OPC_XORI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21329 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21332 switch (extract32(ctx
->opcode
, 20, 1)) {
21334 switch (ctx
->opcode
& 3) {
21336 gen_save(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
21337 extract32(ctx
->opcode
, 2, 1),
21338 extract32(ctx
->opcode
, 3, 9) << 3);
21341 case NM_RESTORE_JRC
:
21342 gen_restore(ctx
, rt
, extract32(ctx
->opcode
, 16, 4),
21343 extract32(ctx
->opcode
, 2, 1),
21344 extract32(ctx
->opcode
, 3, 9) << 3);
21345 if ((ctx
->opcode
& 3) == NM_RESTORE_JRC
) {
21346 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
21350 gen_reserved_instruction(ctx
);
21355 gen_reserved_instruction(ctx
);
21360 gen_slt_imm(ctx
, OPC_SLTI
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21363 gen_slt_imm(ctx
, OPC_SLTIU
, rt
, rs
, extract32(ctx
->opcode
, 0, 12));
21367 TCGv t0
= tcg_temp_new();
21369 imm
= extract32(ctx
->opcode
, 0, 12);
21370 gen_load_gpr(t0
, rs
);
21371 tcg_gen_setcondi_tl(TCG_COND_EQ
, t0
, t0
, imm
);
21372 gen_store_gpr(t0
, rt
);
21378 imm
= (int16_t) extract32(ctx
->opcode
, 0, 12);
21379 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, -imm
);
21383 int shift
= extract32(ctx
->opcode
, 0, 5);
21384 switch (extract32(ctx
->opcode
, 5, 4)) {
21386 if (rt
== 0 && shift
== 0) {
21388 } else if (rt
== 0 && shift
== 3) {
21389 /* EHB - treat as NOP */
21390 } else if (rt
== 0 && shift
== 5) {
21391 /* PAUSE - treat as NOP */
21392 } else if (rt
== 0 && shift
== 6) {
21394 gen_sync(extract32(ctx
->opcode
, 16, 5));
21397 gen_shift_imm(ctx
, OPC_SLL
, rt
, rs
,
21398 extract32(ctx
->opcode
, 0, 5));
21402 gen_shift_imm(ctx
, OPC_SRL
, rt
, rs
,
21403 extract32(ctx
->opcode
, 0, 5));
21406 gen_shift_imm(ctx
, OPC_SRA
, rt
, rs
,
21407 extract32(ctx
->opcode
, 0, 5));
21410 gen_shift_imm(ctx
, OPC_ROTR
, rt
, rs
,
21411 extract32(ctx
->opcode
, 0, 5));
21419 TCGv t0
= tcg_temp_new();
21420 TCGv_i32 shift
= tcg_const_i32(extract32(ctx
->opcode
, 0, 5));
21421 TCGv_i32 shiftx
= tcg_const_i32(extract32(ctx
->opcode
, 7, 4)
21423 TCGv_i32 stripe
= tcg_const_i32(extract32(ctx
->opcode
, 6, 1));
21425 gen_load_gpr(t0
, rs
);
21426 gen_helper_rotx(cpu_gpr
[rt
], t0
, shift
, shiftx
, stripe
);
21429 tcg_temp_free_i32(shift
);
21430 tcg_temp_free_i32(shiftx
);
21431 tcg_temp_free_i32(stripe
);
21435 switch (((ctx
->opcode
>> 10) & 2) |
21436 (extract32(ctx
->opcode
, 5, 1))) {
21439 gen_bitops(ctx
, OPC_INS
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
21440 extract32(ctx
->opcode
, 6, 5));
21443 gen_reserved_instruction(ctx
);
21448 switch (((ctx
->opcode
>> 10) & 2) |
21449 (extract32(ctx
->opcode
, 5, 1))) {
21452 gen_bitops(ctx
, OPC_EXT
, rt
, rs
, extract32(ctx
->opcode
, 0, 5),
21453 extract32(ctx
->opcode
, 6, 5));
21456 gen_reserved_instruction(ctx
);
21461 gen_reserved_instruction(ctx
);
21466 gen_pool32f_nanomips_insn(ctx
);
21471 switch (extract32(ctx
->opcode
, 1, 1)) {
21474 tcg_gen_movi_tl(cpu_gpr
[rt
],
21475 sextract32(ctx
->opcode
, 0, 1) << 31 |
21476 extract32(ctx
->opcode
, 2, 10) << 21 |
21477 extract32(ctx
->opcode
, 12, 9) << 12);
21482 offset
= sextract32(ctx
->opcode
, 0, 1) << 31 |
21483 extract32(ctx
->opcode
, 2, 10) << 21 |
21484 extract32(ctx
->opcode
, 12, 9) << 12;
21486 addr
= ~0xFFF & addr_add(ctx
, ctx
->base
.pc_next
+ 4, offset
);
21487 tcg_gen_movi_tl(cpu_gpr
[rt
], addr
);
21494 uint32_t u
= extract32(ctx
->opcode
, 0, 18);
21496 switch (extract32(ctx
->opcode
, 18, 3)) {
21498 gen_ld(ctx
, OPC_LB
, rt
, 28, u
);
21501 gen_st(ctx
, OPC_SB
, rt
, 28, u
);
21504 gen_ld(ctx
, OPC_LBU
, rt
, 28, u
);
21508 gen_op_addr_addi(ctx
, cpu_gpr
[rt
], cpu_gpr
[28], u
);
21513 switch (ctx
->opcode
& 1) {
21515 gen_ld(ctx
, OPC_LH
, rt
, 28, u
);
21518 gen_ld(ctx
, OPC_LHU
, rt
, 28, u
);
21524 switch (ctx
->opcode
& 1) {
21526 gen_st(ctx
, OPC_SH
, rt
, 28, u
);
21529 gen_reserved_instruction(ctx
);
21535 switch (ctx
->opcode
& 0x3) {
21537 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, 28, u
);
21540 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, 28, u
);
21543 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, 28, u
);
21546 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, 28, u
);
21551 gen_reserved_instruction(ctx
);
21558 uint32_t u
= extract32(ctx
->opcode
, 0, 12);
21560 switch (extract32(ctx
->opcode
, 12, 4)) {
21565 * Break the TB to be able to sync copied instructions
21568 ctx
->base
.is_jmp
= DISAS_STOP
;
21571 /* Treat as NOP. */
21575 gen_ld(ctx
, OPC_LB
, rt
, rs
, u
);
21578 gen_ld(ctx
, OPC_LH
, rt
, rs
, u
);
21581 gen_ld(ctx
, OPC_LW
, rt
, rs
, u
);
21584 gen_ld(ctx
, OPC_LBU
, rt
, rs
, u
);
21587 gen_ld(ctx
, OPC_LHU
, rt
, rs
, u
);
21590 gen_st(ctx
, OPC_SB
, rt
, rs
, u
);
21593 gen_st(ctx
, OPC_SH
, rt
, rs
, u
);
21596 gen_st(ctx
, OPC_SW
, rt
, rs
, u
);
21599 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, u
);
21602 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, u
);
21605 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, u
);
21608 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, u
);
21611 gen_reserved_instruction(ctx
);
21618 int32_t s
= (sextract32(ctx
->opcode
, 15, 1) << 8) |
21619 extract32(ctx
->opcode
, 0, 8);
21621 switch (extract32(ctx
->opcode
, 8, 3)) {
21623 switch (extract32(ctx
->opcode
, 11, 4)) {
21625 gen_ld(ctx
, OPC_LB
, rt
, rs
, s
);
21628 gen_ld(ctx
, OPC_LH
, rt
, rs
, s
);
21631 gen_ld(ctx
, OPC_LW
, rt
, rs
, s
);
21634 gen_ld(ctx
, OPC_LBU
, rt
, rs
, s
);
21637 gen_ld(ctx
, OPC_LHU
, rt
, rs
, s
);
21640 gen_st(ctx
, OPC_SB
, rt
, rs
, s
);
21643 gen_st(ctx
, OPC_SH
, rt
, rs
, s
);
21646 gen_st(ctx
, OPC_SW
, rt
, rs
, s
);
21649 gen_cop1_ldst(ctx
, OPC_LWC1
, rt
, rs
, s
);
21652 gen_cop1_ldst(ctx
, OPC_LDC1
, rt
, rs
, s
);
21655 gen_cop1_ldst(ctx
, OPC_SWC1
, rt
, rs
, s
);
21658 gen_cop1_ldst(ctx
, OPC_SDC1
, rt
, rs
, s
);
21664 * Break the TB to be able to sync copied instructions
21667 ctx
->base
.is_jmp
= DISAS_STOP
;
21670 /* Treat as NOP. */
21674 gen_reserved_instruction(ctx
);
21679 switch (extract32(ctx
->opcode
, 11, 4)) {
21684 TCGv t0
= tcg_temp_new();
21685 TCGv t1
= tcg_temp_new();
21687 gen_base_offset_addr(ctx
, t0
, rs
, s
);
21689 switch (extract32(ctx
->opcode
, 11, 4)) {
21691 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
|
21693 gen_store_gpr(t0
, rt
);
21696 gen_load_gpr(t1
, rt
);
21697 tcg_gen_qemu_st_tl(t1
, t0
, ctx
->mem_idx
, MO_TEUW
|
21706 switch (ctx
->opcode
& 0x03) {
21708 gen_ld(ctx
, OPC_LL
, rt
, rs
, s
);
21712 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21717 switch (ctx
->opcode
& 0x03) {
21719 gen_st_cond(ctx
, rt
, rs
, s
, MO_TESL
, false);
21723 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5),
21729 check_cp0_enabled(ctx
);
21730 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
21731 gen_cache_operation(ctx
, rt
, rs
, s
);
21737 switch (extract32(ctx
->opcode
, 11, 4)) {
21740 check_cp0_enabled(ctx
);
21741 gen_ld(ctx
, OPC_LBE
, rt
, rs
, s
);
21745 check_cp0_enabled(ctx
);
21746 gen_st(ctx
, OPC_SBE
, rt
, rs
, s
);
21750 check_cp0_enabled(ctx
);
21751 gen_ld(ctx
, OPC_LBUE
, rt
, rs
, s
);
21755 /* case NM_SYNCIE */
21757 check_cp0_enabled(ctx
);
21759 * Break the TB to be able to sync copied instructions
21762 ctx
->base
.is_jmp
= DISAS_STOP
;
21764 /* case NM_PREFE */
21766 check_cp0_enabled(ctx
);
21767 /* Treat as NOP. */
21772 check_cp0_enabled(ctx
);
21773 gen_ld(ctx
, OPC_LHE
, rt
, rs
, s
);
21777 check_cp0_enabled(ctx
);
21778 gen_st(ctx
, OPC_SHE
, rt
, rs
, s
);
21782 check_cp0_enabled(ctx
);
21783 gen_ld(ctx
, OPC_LHUE
, rt
, rs
, s
);
21786 check_nms_dl_il_sl_tl_l2c(ctx
);
21787 gen_cache_operation(ctx
, rt
, rs
, s
);
21791 check_cp0_enabled(ctx
);
21792 gen_ld(ctx
, OPC_LWE
, rt
, rs
, s
);
21796 check_cp0_enabled(ctx
);
21797 gen_st(ctx
, OPC_SWE
, rt
, rs
, s
);
21800 switch (extract32(ctx
->opcode
, 2, 2)) {
21804 check_cp0_enabled(ctx
);
21805 gen_ld(ctx
, OPC_LLE
, rt
, rs
, s
);
21810 check_cp0_enabled(ctx
);
21811 gen_llwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5));
21814 gen_reserved_instruction(ctx
);
21819 switch (extract32(ctx
->opcode
, 2, 2)) {
21823 check_cp0_enabled(ctx
);
21824 gen_st_cond(ctx
, rt
, rs
, s
, MO_TESL
, true);
21829 check_cp0_enabled(ctx
);
21830 gen_scwp(ctx
, rs
, 0, rt
, extract32(ctx
->opcode
, 3, 5),
21834 gen_reserved_instruction(ctx
);
21844 int count
= extract32(ctx
->opcode
, 12, 3);
21847 offset
= sextract32(ctx
->opcode
, 15, 1) << 8 |
21848 extract32(ctx
->opcode
, 0, 8);
21849 TCGv va
= tcg_temp_new();
21850 TCGv t1
= tcg_temp_new();
21851 MemOp memop
= (extract32(ctx
->opcode
, 8, 3)) ==
21852 NM_P_LS_UAWM
? MO_UNALN
: 0;
21854 count
= (count
== 0) ? 8 : count
;
21855 while (counter
!= count
) {
21856 int this_rt
= ((rt
+ counter
) & 0x1f) | (rt
& 0x10);
21857 int this_offset
= offset
+ (counter
<< 2);
21859 gen_base_offset_addr(ctx
, va
, rs
, this_offset
);
21861 switch (extract32(ctx
->opcode
, 11, 1)) {
21863 tcg_gen_qemu_ld_tl(t1
, va
, ctx
->mem_idx
,
21865 gen_store_gpr(t1
, this_rt
);
21866 if ((this_rt
== rs
) &&
21867 (counter
!= (count
- 1))) {
21868 /* UNPREDICTABLE */
21872 this_rt
= (rt
== 0) ? 0 : this_rt
;
21873 gen_load_gpr(t1
, this_rt
);
21874 tcg_gen_qemu_st_tl(t1
, va
, ctx
->mem_idx
,
21885 gen_reserved_instruction(ctx
);
21893 TCGv t0
= tcg_temp_new();
21894 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 21 |
21895 extract32(ctx
->opcode
, 1, 20) << 1;
21896 rd
= (extract32(ctx
->opcode
, 24, 1)) == 0 ? 4 : 5;
21897 rt
= decode_gpr_gpr4_zero(extract32(ctx
->opcode
, 25, 1) << 3 |
21898 extract32(ctx
->opcode
, 21, 3));
21899 gen_load_gpr(t0
, rt
);
21900 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
21901 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21907 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 25 |
21908 extract32(ctx
->opcode
, 1, 24) << 1;
21910 if ((extract32(ctx
->opcode
, 25, 1)) == 0) {
21912 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, 0, 0, s
);
21915 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 4, 0, 0, s
);
21920 switch (extract32(ctx
->opcode
, 12, 4)) {
21923 gen_compute_branch_nm(ctx
, OPC_JALR
, 4, rs
, rt
, 0);
21926 gen_compute_nanomips_pbalrsc_branch(ctx
, rs
, rt
);
21929 gen_reserved_instruction(ctx
);
21935 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21936 extract32(ctx
->opcode
, 1, 13) << 1;
21937 switch (extract32(ctx
->opcode
, 14, 2)) {
21940 gen_compute_branch_nm(ctx
, OPC_BEQ
, 4, rs
, rt
, s
);
21943 s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21944 extract32(ctx
->opcode
, 1, 13) << 1;
21945 check_cp1_enabled(ctx
);
21946 switch (extract32(ctx
->opcode
, 16, 5)) {
21948 gen_compute_branch_cp1_nm(ctx
, OPC_BC1EQZ
, rt
, s
);
21951 gen_compute_branch_cp1_nm(ctx
, OPC_BC1NEZ
, rt
, s
);
21956 int32_t imm
= extract32(ctx
->opcode
, 1, 13) |
21957 extract32(ctx
->opcode
, 0, 1) << 13;
21959 gen_compute_branch_nm(ctx
, OPC_BPOSGE32
, 4, -1, -2,
21964 gen_reserved_instruction(ctx
);
21970 gen_compute_compact_branch_nm(ctx
, OPC_BC
, rs
, rt
, s
);
21972 gen_compute_compact_branch_nm(ctx
, OPC_BGEC
, rs
, rt
, s
);
21976 if (rs
== rt
|| rt
== 0) {
21977 gen_compute_compact_branch_nm(ctx
, OPC_BC
, 0, 0, s
);
21978 } else if (rs
== 0) {
21979 gen_compute_compact_branch_nm(ctx
, OPC_BEQZC
, rt
, 0, s
);
21981 gen_compute_compact_branch_nm(ctx
, OPC_BGEUC
, rs
, rt
, s
);
21989 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 14 |
21990 extract32(ctx
->opcode
, 1, 13) << 1;
21991 switch (extract32(ctx
->opcode
, 14, 2)) {
21994 gen_compute_branch_nm(ctx
, OPC_BNE
, 4, rs
, rt
, s
);
21997 if (rs
!= 0 && rt
!= 0 && rs
== rt
) {
21999 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
22001 gen_compute_compact_branch_nm(ctx
, OPC_BLTC
, rs
, rt
, s
);
22005 if (rs
== 0 || rs
== rt
) {
22007 ctx
->hflags
|= MIPS_HFLAG_FBNSLOT
;
22009 gen_compute_compact_branch_nm(ctx
, OPC_BLTUC
, rs
, rt
, s
);
22013 gen_reserved_instruction(ctx
);
22020 int32_t s
= sextract32(ctx
->opcode
, 0, 1) << 11 |
22021 extract32(ctx
->opcode
, 1, 10) << 1;
22022 uint32_t u
= extract32(ctx
->opcode
, 11, 7);
22024 gen_compute_imm_branch(ctx
, extract32(ctx
->opcode
, 18, 3),
22029 gen_reserved_instruction(ctx
);
22035 static int decode_nanomips_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
22038 int rt
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22039 int rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS3(ctx
->opcode
));
22040 int rd
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RD3(ctx
->opcode
));
22044 /* make sure instructions are on a halfword boundary */
22045 if (ctx
->base
.pc_next
& 0x1) {
22046 TCGv tmp
= tcg_const_tl(ctx
->base
.pc_next
);
22047 tcg_gen_st_tl(tmp
, cpu_env
, offsetof(CPUMIPSState
, CP0_BadVAddr
));
22048 tcg_temp_free(tmp
);
22049 generate_exception_end(ctx
, EXCP_AdEL
);
22053 op
= extract32(ctx
->opcode
, 10, 6);
22056 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
22059 rs
= NANOMIPS_EXTRACT_RS5(ctx
->opcode
);
22060 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, 0);
22063 switch (extract32(ctx
->opcode
, 3, 2)) {
22064 case NM_P16_SYSCALL
:
22065 if (extract32(ctx
->opcode
, 2, 1) == 0) {
22066 generate_exception_end(ctx
, EXCP_SYSCALL
);
22068 gen_reserved_instruction(ctx
);
22072 generate_exception_end(ctx
, EXCP_BREAK
);
22075 if (is_uhi(extract32(ctx
->opcode
, 0, 3))) {
22076 gen_helper_do_semihosting(cpu_env
);
22078 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
22079 gen_reserved_instruction(ctx
);
22081 generate_exception_end(ctx
, EXCP_DBp
);
22086 gen_reserved_instruction(ctx
);
22093 int shift
= extract32(ctx
->opcode
, 0, 3);
22095 shift
= (shift
== 0) ? 8 : shift
;
22097 switch (extract32(ctx
->opcode
, 3, 1)) {
22105 gen_shift_imm(ctx
, opc
, rt
, rs
, shift
);
22109 switch (ctx
->opcode
& 1) {
22111 gen_pool16c_nanomips_insn(ctx
);
22114 gen_ldxs(ctx
, rt
, rs
, rd
);
22119 switch (extract32(ctx
->opcode
, 6, 1)) {
22121 imm
= extract32(ctx
->opcode
, 0, 6) << 2;
22122 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, 29, imm
);
22125 gen_reserved_instruction(ctx
);
22130 switch (extract32(ctx
->opcode
, 3, 1)) {
22132 imm
= extract32(ctx
->opcode
, 0, 3) << 2;
22133 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rs
, imm
);
22135 case NM_P_ADDIURS5
:
22136 rt
= extract32(ctx
->opcode
, 5, 5);
22138 /* imm = sign_extend(s[3] . s[2:0] , from_nbits = 4) */
22139 imm
= (sextract32(ctx
->opcode
, 4, 1) << 3) |
22140 (extract32(ctx
->opcode
, 0, 3));
22141 gen_arith_imm(ctx
, OPC_ADDIU
, rt
, rt
, imm
);
22147 switch (ctx
->opcode
& 0x1) {
22149 gen_arith(ctx
, OPC_ADDU
, rd
, rs
, rt
);
22152 gen_arith(ctx
, OPC_SUBU
, rd
, rs
, rt
);
22157 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
22158 extract32(ctx
->opcode
, 5, 3);
22159 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
22160 extract32(ctx
->opcode
, 0, 3);
22161 rt
= decode_gpr_gpr4(rt
);
22162 rs
= decode_gpr_gpr4(rs
);
22163 switch ((extract32(ctx
->opcode
, 7, 2) & 0x2) |
22164 (extract32(ctx
->opcode
, 3, 1))) {
22167 gen_arith(ctx
, OPC_ADDU
, rt
, rs
, rt
);
22171 gen_r6_muldiv(ctx
, R6_OPC_MUL
, rt
, rs
, rt
);
22174 gen_reserved_instruction(ctx
);
22180 int imm
= extract32(ctx
->opcode
, 0, 7);
22181 imm
= (imm
== 0x7f ? -1 : imm
);
22183 tcg_gen_movi_tl(cpu_gpr
[rt
], imm
);
22189 uint32_t u
= extract32(ctx
->opcode
, 0, 4);
22190 u
= (u
== 12) ? 0xff :
22191 (u
== 13) ? 0xffff : u
;
22192 gen_logic_imm(ctx
, OPC_ANDI
, rt
, rs
, u
);
22196 offset
= extract32(ctx
->opcode
, 0, 2);
22197 switch (extract32(ctx
->opcode
, 2, 2)) {
22199 gen_ld(ctx
, OPC_LB
, rt
, rs
, offset
);
22202 rt
= decode_gpr_gpr3_src_store(
22203 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22204 gen_st(ctx
, OPC_SB
, rt
, rs
, offset
);
22207 gen_ld(ctx
, OPC_LBU
, rt
, rs
, offset
);
22210 gen_reserved_instruction(ctx
);
22215 offset
= extract32(ctx
->opcode
, 1, 2) << 1;
22216 switch ((extract32(ctx
->opcode
, 3, 1) << 1) | (ctx
->opcode
& 1)) {
22218 gen_ld(ctx
, OPC_LH
, rt
, rs
, offset
);
22221 rt
= decode_gpr_gpr3_src_store(
22222 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22223 gen_st(ctx
, OPC_SH
, rt
, rs
, offset
);
22226 gen_ld(ctx
, OPC_LHU
, rt
, rs
, offset
);
22229 gen_reserved_instruction(ctx
);
22234 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
22235 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
22238 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
22239 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
22240 gen_ld(ctx
, OPC_LW
, rt
, 29, offset
);
22244 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
22245 extract32(ctx
->opcode
, 5, 3);
22246 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
22247 extract32(ctx
->opcode
, 0, 3);
22248 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
22249 (extract32(ctx
->opcode
, 8, 1) << 2);
22250 rt
= decode_gpr_gpr4(rt
);
22251 rs
= decode_gpr_gpr4(rs
);
22252 gen_ld(ctx
, OPC_LW
, rt
, rs
, offset
);
22256 rt
= (extract32(ctx
->opcode
, 9, 1) << 3) |
22257 extract32(ctx
->opcode
, 5, 3);
22258 rs
= (extract32(ctx
->opcode
, 4, 1) << 3) |
22259 extract32(ctx
->opcode
, 0, 3);
22260 offset
= (extract32(ctx
->opcode
, 3, 1) << 3) |
22261 (extract32(ctx
->opcode
, 8, 1) << 2);
22262 rt
= decode_gpr_gpr4_zero(rt
);
22263 rs
= decode_gpr_gpr4(rs
);
22264 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
22267 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
22268 gen_ld(ctx
, OPC_LW
, rt
, 28, offset
);
22271 rt
= NANOMIPS_EXTRACT_RD5(ctx
->opcode
);
22272 offset
= extract32(ctx
->opcode
, 0, 5) << 2;
22273 gen_st(ctx
, OPC_SW
, rt
, 29, offset
);
22276 rt
= decode_gpr_gpr3_src_store(
22277 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22278 rs
= decode_gpr_gpr3(NANOMIPS_EXTRACT_RS3(ctx
->opcode
));
22279 offset
= extract32(ctx
->opcode
, 0, 4) << 2;
22280 gen_st(ctx
, OPC_SW
, rt
, rs
, offset
);
22283 rt
= decode_gpr_gpr3_src_store(
22284 NANOMIPS_EXTRACT_RT3(ctx
->opcode
));
22285 offset
= extract32(ctx
->opcode
, 0, 7) << 2;
22286 gen_st(ctx
, OPC_SW
, rt
, 28, offset
);
22289 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, 0, 0,
22290 (sextract32(ctx
->opcode
, 0, 1) << 10) |
22291 (extract32(ctx
->opcode
, 1, 9) << 1));
22294 gen_compute_branch_nm(ctx
, OPC_BGEZAL
, 2, 0, 0,
22295 (sextract32(ctx
->opcode
, 0, 1) << 10) |
22296 (extract32(ctx
->opcode
, 1, 9) << 1));
22299 gen_compute_branch_nm(ctx
, OPC_BEQ
, 2, rt
, 0,
22300 (sextract32(ctx
->opcode
, 0, 1) << 7) |
22301 (extract32(ctx
->opcode
, 1, 6) << 1));
22304 gen_compute_branch_nm(ctx
, OPC_BNE
, 2, rt
, 0,
22305 (sextract32(ctx
->opcode
, 0, 1) << 7) |
22306 (extract32(ctx
->opcode
, 1, 6) << 1));
22309 switch (ctx
->opcode
& 0xf) {
22312 switch (extract32(ctx
->opcode
, 4, 1)) {
22314 gen_compute_branch_nm(ctx
, OPC_JR
, 2,
22315 extract32(ctx
->opcode
, 5, 5), 0, 0);
22318 gen_compute_branch_nm(ctx
, OPC_JALR
, 2,
22319 extract32(ctx
->opcode
, 5, 5), 31, 0);
22326 uint32_t opc
= extract32(ctx
->opcode
, 4, 3) <
22327 extract32(ctx
->opcode
, 7, 3) ? OPC_BEQ
: OPC_BNE
;
22328 gen_compute_branch_nm(ctx
, opc
, 2, rs
, rt
,
22329 extract32(ctx
->opcode
, 0, 4) << 1);
22336 int count
= extract32(ctx
->opcode
, 0, 4);
22337 int u
= extract32(ctx
->opcode
, 4, 4) << 4;
22339 rt
= 30 + extract32(ctx
->opcode
, 9, 1);
22340 switch (extract32(ctx
->opcode
, 8, 1)) {
22342 gen_save(ctx
, rt
, count
, 0, u
);
22344 case NM_RESTORE_JRC16
:
22345 gen_restore(ctx
, rt
, count
, 0, u
);
22346 gen_compute_branch_nm(ctx
, OPC_JR
, 2, 31, 0, 0);
22355 static const int gpr2reg1
[] = {4, 5, 6, 7};
22356 static const int gpr2reg2
[] = {5, 6, 7, 8};
22358 int rd2
= extract32(ctx
->opcode
, 3, 1) << 1 |
22359 extract32(ctx
->opcode
, 8, 1);
22360 int r1
= gpr2reg1
[rd2
];
22361 int r2
= gpr2reg2
[rd2
];
22362 int r3
= extract32(ctx
->opcode
, 4, 1) << 3 |
22363 extract32(ctx
->opcode
, 0, 3);
22364 int r4
= extract32(ctx
->opcode
, 9, 1) << 3 |
22365 extract32(ctx
->opcode
, 5, 3);
22366 TCGv t0
= tcg_temp_new();
22367 TCGv t1
= tcg_temp_new();
22368 if (op
== NM_MOVEP
) {
22371 rs
= decode_gpr_gpr4_zero(r3
);
22372 rt
= decode_gpr_gpr4_zero(r4
);
22374 rd
= decode_gpr_gpr4(r3
);
22375 re
= decode_gpr_gpr4(r4
);
22379 gen_load_gpr(t0
, rs
);
22380 gen_load_gpr(t1
, rt
);
22381 tcg_gen_mov_tl(cpu_gpr
[rd
], t0
);
22382 tcg_gen_mov_tl(cpu_gpr
[re
], t1
);
22388 return decode_nanomips_32_48_opc(env
, ctx
);
22395 /* SmartMIPS extension to MIPS32 */
22397 #if defined(TARGET_MIPS64)
22399 /* MDMX extension to MIPS64 */
22403 /* MIPSDSP functions. */
22404 static void gen_mipsdsp_ld(DisasContext
*ctx
, uint32_t opc
,
22405 int rd
, int base
, int offset
)
22410 t0
= tcg_temp_new();
22413 gen_load_gpr(t0
, offset
);
22414 } else if (offset
== 0) {
22415 gen_load_gpr(t0
, base
);
22417 gen_op_addr_add(ctx
, t0
, cpu_gpr
[base
], cpu_gpr
[offset
]);
22422 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_UB
);
22423 gen_store_gpr(t0
, rd
);
22426 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESW
);
22427 gen_store_gpr(t0
, rd
);
22430 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TESL
);
22431 gen_store_gpr(t0
, rd
);
22433 #if defined(TARGET_MIPS64)
22435 tcg_gen_qemu_ld_tl(t0
, t0
, ctx
->mem_idx
, MO_TEQ
);
22436 gen_store_gpr(t0
, rd
);
22443 static void gen_mipsdsp_arith(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
22444 int ret
, int v1
, int v2
)
22450 /* Treat as NOP. */
22454 v1_t
= tcg_temp_new();
22455 v2_t
= tcg_temp_new();
22457 gen_load_gpr(v1_t
, v1
);
22458 gen_load_gpr(v2_t
, v2
);
22461 /* OPC_MULT_G_2E is equal OPC_ADDUH_QB_DSP */
22462 case OPC_MULT_G_2E
:
22466 gen_helper_adduh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22468 case OPC_ADDUH_R_QB
:
22469 gen_helper_adduh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22472 gen_helper_addqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22474 case OPC_ADDQH_R_PH
:
22475 gen_helper_addqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22478 gen_helper_addqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22480 case OPC_ADDQH_R_W
:
22481 gen_helper_addqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22484 gen_helper_subuh_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22486 case OPC_SUBUH_R_QB
:
22487 gen_helper_subuh_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22490 gen_helper_subqh_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22492 case OPC_SUBQH_R_PH
:
22493 gen_helper_subqh_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22496 gen_helper_subqh_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22498 case OPC_SUBQH_R_W
:
22499 gen_helper_subqh_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22503 case OPC_ABSQ_S_PH_DSP
:
22505 case OPC_ABSQ_S_QB
:
22507 gen_helper_absq_s_qb(cpu_gpr
[ret
], v2_t
, cpu_env
);
22509 case OPC_ABSQ_S_PH
:
22511 gen_helper_absq_s_ph(cpu_gpr
[ret
], v2_t
, cpu_env
);
22515 gen_helper_absq_s_w(cpu_gpr
[ret
], v2_t
, cpu_env
);
22517 case OPC_PRECEQ_W_PHL
:
22519 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFF0000);
22520 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22522 case OPC_PRECEQ_W_PHR
:
22524 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0x0000FFFF);
22525 tcg_gen_shli_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], 16);
22526 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
22528 case OPC_PRECEQU_PH_QBL
:
22530 gen_helper_precequ_ph_qbl(cpu_gpr
[ret
], v2_t
);
22532 case OPC_PRECEQU_PH_QBR
:
22534 gen_helper_precequ_ph_qbr(cpu_gpr
[ret
], v2_t
);
22536 case OPC_PRECEQU_PH_QBLA
:
22538 gen_helper_precequ_ph_qbla(cpu_gpr
[ret
], v2_t
);
22540 case OPC_PRECEQU_PH_QBRA
:
22542 gen_helper_precequ_ph_qbra(cpu_gpr
[ret
], v2_t
);
22544 case OPC_PRECEU_PH_QBL
:
22546 gen_helper_preceu_ph_qbl(cpu_gpr
[ret
], v2_t
);
22548 case OPC_PRECEU_PH_QBR
:
22550 gen_helper_preceu_ph_qbr(cpu_gpr
[ret
], v2_t
);
22552 case OPC_PRECEU_PH_QBLA
:
22554 gen_helper_preceu_ph_qbla(cpu_gpr
[ret
], v2_t
);
22556 case OPC_PRECEU_PH_QBRA
:
22558 gen_helper_preceu_ph_qbra(cpu_gpr
[ret
], v2_t
);
22562 case OPC_ADDU_QB_DSP
:
22566 gen_helper_addq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22568 case OPC_ADDQ_S_PH
:
22570 gen_helper_addq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22574 gen_helper_addq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22578 gen_helper_addu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22580 case OPC_ADDU_S_QB
:
22582 gen_helper_addu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22586 gen_helper_addu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22588 case OPC_ADDU_S_PH
:
22590 gen_helper_addu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22594 gen_helper_subq_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22596 case OPC_SUBQ_S_PH
:
22598 gen_helper_subq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22602 gen_helper_subq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22606 gen_helper_subu_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22608 case OPC_SUBU_S_QB
:
22610 gen_helper_subu_s_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22614 gen_helper_subu_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22616 case OPC_SUBU_S_PH
:
22618 gen_helper_subu_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22622 gen_helper_addsc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22626 gen_helper_addwc(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22630 gen_helper_modsub(cpu_gpr
[ret
], v1_t
, v2_t
);
22632 case OPC_RADDU_W_QB
:
22634 gen_helper_raddu_w_qb(cpu_gpr
[ret
], v1_t
);
22638 case OPC_CMPU_EQ_QB_DSP
:
22640 case OPC_PRECR_QB_PH
:
22642 gen_helper_precr_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22644 case OPC_PRECRQ_QB_PH
:
22646 gen_helper_precrq_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22648 case OPC_PRECR_SRA_PH_W
:
22651 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22652 gen_helper_precr_sra_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22654 tcg_temp_free_i32(sa_t
);
22657 case OPC_PRECR_SRA_R_PH_W
:
22660 TCGv_i32 sa_t
= tcg_const_i32(v2
);
22661 gen_helper_precr_sra_r_ph_w(cpu_gpr
[ret
], sa_t
, v1_t
,
22663 tcg_temp_free_i32(sa_t
);
22666 case OPC_PRECRQ_PH_W
:
22668 gen_helper_precrq_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
);
22670 case OPC_PRECRQ_RS_PH_W
:
22672 gen_helper_precrq_rs_ph_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22674 case OPC_PRECRQU_S_QB_PH
:
22676 gen_helper_precrqu_s_qb_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22680 #ifdef TARGET_MIPS64
22681 case OPC_ABSQ_S_QH_DSP
:
22683 case OPC_PRECEQ_L_PWL
:
22685 tcg_gen_andi_tl(cpu_gpr
[ret
], v2_t
, 0xFFFFFFFF00000000ull
);
22687 case OPC_PRECEQ_L_PWR
:
22689 tcg_gen_shli_tl(cpu_gpr
[ret
], v2_t
, 32);
22691 case OPC_PRECEQ_PW_QHL
:
22693 gen_helper_preceq_pw_qhl(cpu_gpr
[ret
], v2_t
);
22695 case OPC_PRECEQ_PW_QHR
:
22697 gen_helper_preceq_pw_qhr(cpu_gpr
[ret
], v2_t
);
22699 case OPC_PRECEQ_PW_QHLA
:
22701 gen_helper_preceq_pw_qhla(cpu_gpr
[ret
], v2_t
);
22703 case OPC_PRECEQ_PW_QHRA
:
22705 gen_helper_preceq_pw_qhra(cpu_gpr
[ret
], v2_t
);
22707 case OPC_PRECEQU_QH_OBL
:
22709 gen_helper_precequ_qh_obl(cpu_gpr
[ret
], v2_t
);
22711 case OPC_PRECEQU_QH_OBR
:
22713 gen_helper_precequ_qh_obr(cpu_gpr
[ret
], v2_t
);
22715 case OPC_PRECEQU_QH_OBLA
:
22717 gen_helper_precequ_qh_obla(cpu_gpr
[ret
], v2_t
);
22719 case OPC_PRECEQU_QH_OBRA
:
22721 gen_helper_precequ_qh_obra(cpu_gpr
[ret
], v2_t
);
22723 case OPC_PRECEU_QH_OBL
:
22725 gen_helper_preceu_qh_obl(cpu_gpr
[ret
], v2_t
);
22727 case OPC_PRECEU_QH_OBR
:
22729 gen_helper_preceu_qh_obr(cpu_gpr
[ret
], v2_t
);
22731 case OPC_PRECEU_QH_OBLA
:
22733 gen_helper_preceu_qh_obla(cpu_gpr
[ret
], v2_t
);
22735 case OPC_PRECEU_QH_OBRA
:
22737 gen_helper_preceu_qh_obra(cpu_gpr
[ret
], v2_t
);
22739 case OPC_ABSQ_S_OB
:
22741 gen_helper_absq_s_ob(cpu_gpr
[ret
], v2_t
, cpu_env
);
22743 case OPC_ABSQ_S_PW
:
22745 gen_helper_absq_s_pw(cpu_gpr
[ret
], v2_t
, cpu_env
);
22747 case OPC_ABSQ_S_QH
:
22749 gen_helper_absq_s_qh(cpu_gpr
[ret
], v2_t
, cpu_env
);
22753 case OPC_ADDU_OB_DSP
:
22755 case OPC_RADDU_L_OB
:
22757 gen_helper_raddu_l_ob(cpu_gpr
[ret
], v1_t
);
22761 gen_helper_subq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22763 case OPC_SUBQ_S_PW
:
22765 gen_helper_subq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22769 gen_helper_subq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22771 case OPC_SUBQ_S_QH
:
22773 gen_helper_subq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22777 gen_helper_subu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22779 case OPC_SUBU_S_OB
:
22781 gen_helper_subu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22785 gen_helper_subu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22787 case OPC_SUBU_S_QH
:
22789 gen_helper_subu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22793 gen_helper_subuh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22795 case OPC_SUBUH_R_OB
:
22797 gen_helper_subuh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22801 gen_helper_addq_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22803 case OPC_ADDQ_S_PW
:
22805 gen_helper_addq_s_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22809 gen_helper_addq_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22811 case OPC_ADDQ_S_QH
:
22813 gen_helper_addq_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22817 gen_helper_addu_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22819 case OPC_ADDU_S_OB
:
22821 gen_helper_addu_s_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22825 gen_helper_addu_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22827 case OPC_ADDU_S_QH
:
22829 gen_helper_addu_s_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22833 gen_helper_adduh_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22835 case OPC_ADDUH_R_OB
:
22837 gen_helper_adduh_r_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
22841 case OPC_CMPU_EQ_OB_DSP
:
22843 case OPC_PRECR_OB_QH
:
22845 gen_helper_precr_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22847 case OPC_PRECR_SRA_QH_PW
:
22850 TCGv_i32 ret_t
= tcg_const_i32(ret
);
22851 gen_helper_precr_sra_qh_pw(v2_t
, v1_t
, v2_t
, ret_t
);
22852 tcg_temp_free_i32(ret_t
);
22855 case OPC_PRECR_SRA_R_QH_PW
:
22858 TCGv_i32 sa_v
= tcg_const_i32(ret
);
22859 gen_helper_precr_sra_r_qh_pw(v2_t
, v1_t
, v2_t
, sa_v
);
22860 tcg_temp_free_i32(sa_v
);
22863 case OPC_PRECRQ_OB_QH
:
22865 gen_helper_precrq_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
);
22867 case OPC_PRECRQ_PW_L
:
22869 gen_helper_precrq_pw_l(cpu_gpr
[ret
], v1_t
, v2_t
);
22871 case OPC_PRECRQ_QH_PW
:
22873 gen_helper_precrq_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
22875 case OPC_PRECRQ_RS_QH_PW
:
22877 gen_helper_precrq_rs_qh_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22879 case OPC_PRECRQU_S_OB_QH
:
22881 gen_helper_precrqu_s_ob_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22888 tcg_temp_free(v1_t
);
22889 tcg_temp_free(v2_t
);
22892 static void gen_mipsdsp_shift(DisasContext
*ctx
, uint32_t opc
,
22893 int ret
, int v1
, int v2
)
22901 /* Treat as NOP. */
22905 t0
= tcg_temp_new();
22906 v1_t
= tcg_temp_new();
22907 v2_t
= tcg_temp_new();
22909 tcg_gen_movi_tl(t0
, v1
);
22910 gen_load_gpr(v1_t
, v1
);
22911 gen_load_gpr(v2_t
, v2
);
22914 case OPC_SHLL_QB_DSP
:
22916 op2
= MASK_SHLL_QB(ctx
->opcode
);
22920 gen_helper_shll_qb(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22924 gen_helper_shll_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22928 gen_helper_shll_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22932 gen_helper_shll_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22934 case OPC_SHLL_S_PH
:
22936 gen_helper_shll_s_ph(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22938 case OPC_SHLLV_S_PH
:
22940 gen_helper_shll_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22944 gen_helper_shll_s_w(cpu_gpr
[ret
], t0
, v2_t
, cpu_env
);
22946 case OPC_SHLLV_S_W
:
22948 gen_helper_shll_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
22952 gen_helper_shrl_qb(cpu_gpr
[ret
], t0
, v2_t
);
22956 gen_helper_shrl_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22960 gen_helper_shrl_ph(cpu_gpr
[ret
], t0
, v2_t
);
22964 gen_helper_shrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22968 gen_helper_shra_qb(cpu_gpr
[ret
], t0
, v2_t
);
22970 case OPC_SHRA_R_QB
:
22972 gen_helper_shra_r_qb(cpu_gpr
[ret
], t0
, v2_t
);
22976 gen_helper_shra_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22978 case OPC_SHRAV_R_QB
:
22980 gen_helper_shra_r_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
22984 gen_helper_shra_ph(cpu_gpr
[ret
], t0
, v2_t
);
22986 case OPC_SHRA_R_PH
:
22988 gen_helper_shra_r_ph(cpu_gpr
[ret
], t0
, v2_t
);
22992 gen_helper_shra_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
22994 case OPC_SHRAV_R_PH
:
22996 gen_helper_shra_r_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
23000 gen_helper_shra_r_w(cpu_gpr
[ret
], t0
, v2_t
);
23002 case OPC_SHRAV_R_W
:
23004 gen_helper_shra_r_w(cpu_gpr
[ret
], v1_t
, v2_t
);
23006 default: /* Invalid */
23007 MIPS_INVAL("MASK SHLL.QB");
23008 gen_reserved_instruction(ctx
);
23013 #ifdef TARGET_MIPS64
23014 case OPC_SHLL_OB_DSP
:
23015 op2
= MASK_SHLL_OB(ctx
->opcode
);
23019 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
23023 gen_helper_shll_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
23025 case OPC_SHLL_S_PW
:
23027 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
23029 case OPC_SHLLV_S_PW
:
23031 gen_helper_shll_s_pw(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
23035 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
23039 gen_helper_shll_ob(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
23043 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
23047 gen_helper_shll_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
23049 case OPC_SHLL_S_QH
:
23051 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, t0
, cpu_env
);
23053 case OPC_SHLLV_S_QH
:
23055 gen_helper_shll_s_qh(cpu_gpr
[ret
], v2_t
, v1_t
, cpu_env
);
23059 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, t0
);
23063 gen_helper_shra_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
23065 case OPC_SHRA_R_OB
:
23067 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, t0
);
23069 case OPC_SHRAV_R_OB
:
23071 gen_helper_shra_r_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
23075 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, t0
);
23079 gen_helper_shra_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
23081 case OPC_SHRA_R_PW
:
23083 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, t0
);
23085 case OPC_SHRAV_R_PW
:
23087 gen_helper_shra_r_pw(cpu_gpr
[ret
], v2_t
, v1_t
);
23091 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, t0
);
23095 gen_helper_shra_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
23097 case OPC_SHRA_R_QH
:
23099 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, t0
);
23101 case OPC_SHRAV_R_QH
:
23103 gen_helper_shra_r_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
23107 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, t0
);
23111 gen_helper_shrl_ob(cpu_gpr
[ret
], v2_t
, v1_t
);
23115 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, t0
);
23119 gen_helper_shrl_qh(cpu_gpr
[ret
], v2_t
, v1_t
);
23121 default: /* Invalid */
23122 MIPS_INVAL("MASK SHLL.OB");
23123 gen_reserved_instruction(ctx
);
23131 tcg_temp_free(v1_t
);
23132 tcg_temp_free(v2_t
);
23135 static void gen_mipsdsp_multiply(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23136 int ret
, int v1
, int v2
, int check_ret
)
23142 if ((ret
== 0) && (check_ret
== 1)) {
23143 /* Treat as NOP. */
23147 t0
= tcg_temp_new_i32();
23148 v1_t
= tcg_temp_new();
23149 v2_t
= tcg_temp_new();
23151 tcg_gen_movi_i32(t0
, ret
);
23152 gen_load_gpr(v1_t
, v1
);
23153 gen_load_gpr(v2_t
, v2
);
23157 * OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
23158 * the same mask and op1.
23160 case OPC_MULT_G_2E
:
23164 gen_helper_mul_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23167 gen_helper_mul_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23170 gen_helper_mulq_s_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23172 case OPC_MULQ_RS_W
:
23173 gen_helper_mulq_rs_w(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23177 case OPC_DPA_W_PH_DSP
:
23179 case OPC_DPAU_H_QBL
:
23181 gen_helper_dpau_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
23183 case OPC_DPAU_H_QBR
:
23185 gen_helper_dpau_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
23187 case OPC_DPSU_H_QBL
:
23189 gen_helper_dpsu_h_qbl(t0
, v1_t
, v2_t
, cpu_env
);
23191 case OPC_DPSU_H_QBR
:
23193 gen_helper_dpsu_h_qbr(t0
, v1_t
, v2_t
, cpu_env
);
23197 gen_helper_dpa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23199 case OPC_DPAX_W_PH
:
23201 gen_helper_dpax_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23203 case OPC_DPAQ_S_W_PH
:
23205 gen_helper_dpaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23207 case OPC_DPAQX_S_W_PH
:
23209 gen_helper_dpaqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23211 case OPC_DPAQX_SA_W_PH
:
23213 gen_helper_dpaqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23217 gen_helper_dps_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23219 case OPC_DPSX_W_PH
:
23221 gen_helper_dpsx_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23223 case OPC_DPSQ_S_W_PH
:
23225 gen_helper_dpsq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23227 case OPC_DPSQX_S_W_PH
:
23229 gen_helper_dpsqx_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23231 case OPC_DPSQX_SA_W_PH
:
23233 gen_helper_dpsqx_sa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23235 case OPC_MULSAQ_S_W_PH
:
23237 gen_helper_mulsaq_s_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23239 case OPC_DPAQ_SA_L_W
:
23241 gen_helper_dpaq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
23243 case OPC_DPSQ_SA_L_W
:
23245 gen_helper_dpsq_sa_l_w(t0
, v1_t
, v2_t
, cpu_env
);
23247 case OPC_MAQ_S_W_PHL
:
23249 gen_helper_maq_s_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
23251 case OPC_MAQ_S_W_PHR
:
23253 gen_helper_maq_s_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
23255 case OPC_MAQ_SA_W_PHL
:
23257 gen_helper_maq_sa_w_phl(t0
, v1_t
, v2_t
, cpu_env
);
23259 case OPC_MAQ_SA_W_PHR
:
23261 gen_helper_maq_sa_w_phr(t0
, v1_t
, v2_t
, cpu_env
);
23263 case OPC_MULSA_W_PH
:
23265 gen_helper_mulsa_w_ph(t0
, v1_t
, v2_t
, cpu_env
);
23269 #ifdef TARGET_MIPS64
23270 case OPC_DPAQ_W_QH_DSP
:
23272 int ac
= ret
& 0x03;
23273 tcg_gen_movi_i32(t0
, ac
);
23278 gen_helper_dmadd(v1_t
, v2_t
, t0
, cpu_env
);
23282 gen_helper_dmaddu(v1_t
, v2_t
, t0
, cpu_env
);
23286 gen_helper_dmsub(v1_t
, v2_t
, t0
, cpu_env
);
23290 gen_helper_dmsubu(v1_t
, v2_t
, t0
, cpu_env
);
23294 gen_helper_dpa_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23296 case OPC_DPAQ_S_W_QH
:
23298 gen_helper_dpaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23300 case OPC_DPAQ_SA_L_PW
:
23302 gen_helper_dpaq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23304 case OPC_DPAU_H_OBL
:
23306 gen_helper_dpau_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
23308 case OPC_DPAU_H_OBR
:
23310 gen_helper_dpau_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
23314 gen_helper_dps_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23316 case OPC_DPSQ_S_W_QH
:
23318 gen_helper_dpsq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23320 case OPC_DPSQ_SA_L_PW
:
23322 gen_helper_dpsq_sa_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23324 case OPC_DPSU_H_OBL
:
23326 gen_helper_dpsu_h_obl(v1_t
, v2_t
, t0
, cpu_env
);
23328 case OPC_DPSU_H_OBR
:
23330 gen_helper_dpsu_h_obr(v1_t
, v2_t
, t0
, cpu_env
);
23332 case OPC_MAQ_S_L_PWL
:
23334 gen_helper_maq_s_l_pwl(v1_t
, v2_t
, t0
, cpu_env
);
23336 case OPC_MAQ_S_L_PWR
:
23338 gen_helper_maq_s_l_pwr(v1_t
, v2_t
, t0
, cpu_env
);
23340 case OPC_MAQ_S_W_QHLL
:
23342 gen_helper_maq_s_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
23344 case OPC_MAQ_SA_W_QHLL
:
23346 gen_helper_maq_sa_w_qhll(v1_t
, v2_t
, t0
, cpu_env
);
23348 case OPC_MAQ_S_W_QHLR
:
23350 gen_helper_maq_s_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
23352 case OPC_MAQ_SA_W_QHLR
:
23354 gen_helper_maq_sa_w_qhlr(v1_t
, v2_t
, t0
, cpu_env
);
23356 case OPC_MAQ_S_W_QHRL
:
23358 gen_helper_maq_s_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
23360 case OPC_MAQ_SA_W_QHRL
:
23362 gen_helper_maq_sa_w_qhrl(v1_t
, v2_t
, t0
, cpu_env
);
23364 case OPC_MAQ_S_W_QHRR
:
23366 gen_helper_maq_s_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
23368 case OPC_MAQ_SA_W_QHRR
:
23370 gen_helper_maq_sa_w_qhrr(v1_t
, v2_t
, t0
, cpu_env
);
23372 case OPC_MULSAQ_S_L_PW
:
23374 gen_helper_mulsaq_s_l_pw(v1_t
, v2_t
, t0
, cpu_env
);
23376 case OPC_MULSAQ_S_W_QH
:
23378 gen_helper_mulsaq_s_w_qh(v1_t
, v2_t
, t0
, cpu_env
);
23384 case OPC_ADDU_QB_DSP
:
23386 case OPC_MULEU_S_PH_QBL
:
23388 gen_helper_muleu_s_ph_qbl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23390 case OPC_MULEU_S_PH_QBR
:
23392 gen_helper_muleu_s_ph_qbr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23394 case OPC_MULQ_RS_PH
:
23396 gen_helper_mulq_rs_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23398 case OPC_MULEQ_S_W_PHL
:
23400 gen_helper_muleq_s_w_phl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23402 case OPC_MULEQ_S_W_PHR
:
23404 gen_helper_muleq_s_w_phr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23406 case OPC_MULQ_S_PH
:
23408 gen_helper_mulq_s_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23412 #ifdef TARGET_MIPS64
23413 case OPC_ADDU_OB_DSP
:
23415 case OPC_MULEQ_S_PW_QHL
:
23417 gen_helper_muleq_s_pw_qhl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23419 case OPC_MULEQ_S_PW_QHR
:
23421 gen_helper_muleq_s_pw_qhr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23423 case OPC_MULEU_S_QH_OBL
:
23425 gen_helper_muleu_s_qh_obl(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23427 case OPC_MULEU_S_QH_OBR
:
23429 gen_helper_muleu_s_qh_obr(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23431 case OPC_MULQ_RS_QH
:
23433 gen_helper_mulq_rs_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23440 tcg_temp_free_i32(t0
);
23441 tcg_temp_free(v1_t
);
23442 tcg_temp_free(v2_t
);
23445 static void gen_mipsdsp_bitinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23453 /* Treat as NOP. */
23457 t0
= tcg_temp_new();
23458 val_t
= tcg_temp_new();
23459 gen_load_gpr(val_t
, val
);
23462 case OPC_ABSQ_S_PH_DSP
:
23466 gen_helper_bitrev(cpu_gpr
[ret
], val_t
);
23471 target_long result
;
23472 imm
= (ctx
->opcode
>> 16) & 0xFF;
23473 result
= (uint32_t)imm
<< 24 |
23474 (uint32_t)imm
<< 16 |
23475 (uint32_t)imm
<< 8 |
23477 result
= (int32_t)result
;
23478 tcg_gen_movi_tl(cpu_gpr
[ret
], result
);
23483 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
23484 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
23485 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23486 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23487 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23488 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
23493 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23494 imm
= (int16_t)(imm
<< 6) >> 6;
23495 tcg_gen_movi_tl(cpu_gpr
[ret
], \
23496 (target_long
)((int32_t)imm
<< 16 | \
23502 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
23503 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23504 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23505 tcg_gen_ext32s_tl(cpu_gpr
[ret
], cpu_gpr
[ret
]);
23509 #ifdef TARGET_MIPS64
23510 case OPC_ABSQ_S_QH_DSP
:
23517 imm
= (ctx
->opcode
>> 16) & 0xFF;
23518 temp
= ((uint64_t)imm
<< 8) | (uint64_t)imm
;
23519 temp
= (temp
<< 16) | temp
;
23520 temp
= (temp
<< 32) | temp
;
23521 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23529 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23530 imm
= (int16_t)(imm
<< 6) >> 6;
23531 temp
= ((target_long
)imm
<< 32) \
23532 | ((target_long
)imm
& 0xFFFFFFFF);
23533 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23541 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23542 imm
= (int16_t)(imm
<< 6) >> 6;
23544 temp
= ((uint64_t)(uint16_t)imm
<< 48) |
23545 ((uint64_t)(uint16_t)imm
<< 32) |
23546 ((uint64_t)(uint16_t)imm
<< 16) |
23547 (uint64_t)(uint16_t)imm
;
23548 tcg_gen_movi_tl(cpu_gpr
[ret
], temp
);
23553 tcg_gen_ext8u_tl(cpu_gpr
[ret
], val_t
);
23554 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 8);
23555 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23556 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23557 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23558 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23559 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23563 tcg_gen_ext32u_i64(cpu_gpr
[ret
], val_t
);
23564 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23565 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23569 tcg_gen_ext16u_tl(cpu_gpr
[ret
], val_t
);
23570 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 16);
23571 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23572 tcg_gen_shli_tl(t0
, cpu_gpr
[ret
], 32);
23573 tcg_gen_or_tl(cpu_gpr
[ret
], cpu_gpr
[ret
], t0
);
23580 tcg_temp_free(val_t
);
23583 static void gen_mipsdsp_add_cmp_pick(DisasContext
*ctx
,
23584 uint32_t op1
, uint32_t op2
,
23585 int ret
, int v1
, int v2
, int check_ret
)
23591 if ((ret
== 0) && (check_ret
== 1)) {
23592 /* Treat as NOP. */
23596 t1
= tcg_temp_new();
23597 v1_t
= tcg_temp_new();
23598 v2_t
= tcg_temp_new();
23600 gen_load_gpr(v1_t
, v1
);
23601 gen_load_gpr(v2_t
, v2
);
23604 case OPC_CMPU_EQ_QB_DSP
:
23606 case OPC_CMPU_EQ_QB
:
23608 gen_helper_cmpu_eq_qb(v1_t
, v2_t
, cpu_env
);
23610 case OPC_CMPU_LT_QB
:
23612 gen_helper_cmpu_lt_qb(v1_t
, v2_t
, cpu_env
);
23614 case OPC_CMPU_LE_QB
:
23616 gen_helper_cmpu_le_qb(v1_t
, v2_t
, cpu_env
);
23618 case OPC_CMPGU_EQ_QB
:
23620 gen_helper_cmpgu_eq_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23622 case OPC_CMPGU_LT_QB
:
23624 gen_helper_cmpgu_lt_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23626 case OPC_CMPGU_LE_QB
:
23628 gen_helper_cmpgu_le_qb(cpu_gpr
[ret
], v1_t
, v2_t
);
23630 case OPC_CMPGDU_EQ_QB
:
23632 gen_helper_cmpgu_eq_qb(t1
, v1_t
, v2_t
);
23633 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23634 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23635 tcg_gen_shli_tl(t1
, t1
, 24);
23636 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23638 case OPC_CMPGDU_LT_QB
:
23640 gen_helper_cmpgu_lt_qb(t1
, v1_t
, v2_t
);
23641 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23642 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23643 tcg_gen_shli_tl(t1
, t1
, 24);
23644 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23646 case OPC_CMPGDU_LE_QB
:
23648 gen_helper_cmpgu_le_qb(t1
, v1_t
, v2_t
);
23649 tcg_gen_mov_tl(cpu_gpr
[ret
], t1
);
23650 tcg_gen_andi_tl(cpu_dspctrl
, cpu_dspctrl
, 0xF0FFFFFF);
23651 tcg_gen_shli_tl(t1
, t1
, 24);
23652 tcg_gen_or_tl(cpu_dspctrl
, cpu_dspctrl
, t1
);
23654 case OPC_CMP_EQ_PH
:
23656 gen_helper_cmp_eq_ph(v1_t
, v2_t
, cpu_env
);
23658 case OPC_CMP_LT_PH
:
23660 gen_helper_cmp_lt_ph(v1_t
, v2_t
, cpu_env
);
23662 case OPC_CMP_LE_PH
:
23664 gen_helper_cmp_le_ph(v1_t
, v2_t
, cpu_env
);
23668 gen_helper_pick_qb(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23672 gen_helper_pick_ph(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23674 case OPC_PACKRL_PH
:
23676 gen_helper_packrl_ph(cpu_gpr
[ret
], v1_t
, v2_t
);
23680 #ifdef TARGET_MIPS64
23681 case OPC_CMPU_EQ_OB_DSP
:
23683 case OPC_CMP_EQ_PW
:
23685 gen_helper_cmp_eq_pw(v1_t
, v2_t
, cpu_env
);
23687 case OPC_CMP_LT_PW
:
23689 gen_helper_cmp_lt_pw(v1_t
, v2_t
, cpu_env
);
23691 case OPC_CMP_LE_PW
:
23693 gen_helper_cmp_le_pw(v1_t
, v2_t
, cpu_env
);
23695 case OPC_CMP_EQ_QH
:
23697 gen_helper_cmp_eq_qh(v1_t
, v2_t
, cpu_env
);
23699 case OPC_CMP_LT_QH
:
23701 gen_helper_cmp_lt_qh(v1_t
, v2_t
, cpu_env
);
23703 case OPC_CMP_LE_QH
:
23705 gen_helper_cmp_le_qh(v1_t
, v2_t
, cpu_env
);
23707 case OPC_CMPGDU_EQ_OB
:
23709 gen_helper_cmpgdu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23711 case OPC_CMPGDU_LT_OB
:
23713 gen_helper_cmpgdu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23715 case OPC_CMPGDU_LE_OB
:
23717 gen_helper_cmpgdu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23719 case OPC_CMPGU_EQ_OB
:
23721 gen_helper_cmpgu_eq_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23723 case OPC_CMPGU_LT_OB
:
23725 gen_helper_cmpgu_lt_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23727 case OPC_CMPGU_LE_OB
:
23729 gen_helper_cmpgu_le_ob(cpu_gpr
[ret
], v1_t
, v2_t
);
23731 case OPC_CMPU_EQ_OB
:
23733 gen_helper_cmpu_eq_ob(v1_t
, v2_t
, cpu_env
);
23735 case OPC_CMPU_LT_OB
:
23737 gen_helper_cmpu_lt_ob(v1_t
, v2_t
, cpu_env
);
23739 case OPC_CMPU_LE_OB
:
23741 gen_helper_cmpu_le_ob(v1_t
, v2_t
, cpu_env
);
23743 case OPC_PACKRL_PW
:
23745 gen_helper_packrl_pw(cpu_gpr
[ret
], v1_t
, v2_t
);
23749 gen_helper_pick_ob(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23753 gen_helper_pick_pw(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23757 gen_helper_pick_qh(cpu_gpr
[ret
], v1_t
, v2_t
, cpu_env
);
23765 tcg_temp_free(v1_t
);
23766 tcg_temp_free(v2_t
);
23769 static void gen_mipsdsp_append(CPUMIPSState
*env
, DisasContext
*ctx
,
23770 uint32_t op1
, int rt
, int rs
, int sa
)
23777 /* Treat as NOP. */
23781 t0
= tcg_temp_new();
23782 gen_load_gpr(t0
, rs
);
23785 case OPC_APPEND_DSP
:
23786 switch (MASK_APPEND(ctx
->opcode
)) {
23789 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 32 - sa
);
23791 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23795 tcg_gen_ext32u_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23796 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23797 tcg_gen_shli_tl(t0
, t0
, 32 - sa
);
23798 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23800 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23804 if (sa
!= 0 && sa
!= 2) {
23805 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23806 tcg_gen_ext32u_tl(t0
, t0
);
23807 tcg_gen_shri_tl(t0
, t0
, 8 * (4 - sa
));
23808 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23810 tcg_gen_ext32s_tl(cpu_gpr
[rt
], cpu_gpr
[rt
]);
23812 default: /* Invalid */
23813 MIPS_INVAL("MASK APPEND");
23814 gen_reserved_instruction(ctx
);
23818 #ifdef TARGET_MIPS64
23819 case OPC_DAPPEND_DSP
:
23820 switch (MASK_DAPPEND(ctx
->opcode
)) {
23823 tcg_gen_deposit_tl(cpu_gpr
[rt
], t0
, cpu_gpr
[rt
], sa
, 64 - sa
);
23827 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 0x20 | sa
);
23828 tcg_gen_shli_tl(t0
, t0
, 64 - (0x20 | sa
));
23829 tcg_gen_or_tl(cpu_gpr
[rt
], t0
, t0
);
23833 tcg_gen_shri_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], sa
);
23834 tcg_gen_shli_tl(t0
, t0
, 64 - sa
);
23835 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23840 if (sa
!= 0 && sa
!= 2 && sa
!= 4) {
23841 tcg_gen_shli_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], 8 * sa
);
23842 tcg_gen_shri_tl(t0
, t0
, 8 * (8 - sa
));
23843 tcg_gen_or_tl(cpu_gpr
[rt
], cpu_gpr
[rt
], t0
);
23846 default: /* Invalid */
23847 MIPS_INVAL("MASK DAPPEND");
23848 gen_reserved_instruction(ctx
);
23857 static void gen_mipsdsp_accinsn(DisasContext
*ctx
, uint32_t op1
, uint32_t op2
,
23858 int ret
, int v1
, int v2
, int check_ret
)
23867 if ((ret
== 0) && (check_ret
== 1)) {
23868 /* Treat as NOP. */
23872 t0
= tcg_temp_new();
23873 t1
= tcg_temp_new();
23874 v1_t
= tcg_temp_new();
23875 v2_t
= tcg_temp_new();
23877 gen_load_gpr(v1_t
, v1
);
23878 gen_load_gpr(v2_t
, v2
);
23881 case OPC_EXTR_W_DSP
:
23885 tcg_gen_movi_tl(t0
, v2
);
23886 tcg_gen_movi_tl(t1
, v1
);
23887 gen_helper_extr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23890 tcg_gen_movi_tl(t0
, v2
);
23891 tcg_gen_movi_tl(t1
, v1
);
23892 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23894 case OPC_EXTR_RS_W
:
23895 tcg_gen_movi_tl(t0
, v2
);
23896 tcg_gen_movi_tl(t1
, v1
);
23897 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23900 tcg_gen_movi_tl(t0
, v2
);
23901 tcg_gen_movi_tl(t1
, v1
);
23902 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23904 case OPC_EXTRV_S_H
:
23905 tcg_gen_movi_tl(t0
, v2
);
23906 gen_helper_extr_s_h(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23909 tcg_gen_movi_tl(t0
, v2
);
23910 gen_helper_extr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23912 case OPC_EXTRV_R_W
:
23913 tcg_gen_movi_tl(t0
, v2
);
23914 gen_helper_extr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23916 case OPC_EXTRV_RS_W
:
23917 tcg_gen_movi_tl(t0
, v2
);
23918 gen_helper_extr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23921 tcg_gen_movi_tl(t0
, v2
);
23922 tcg_gen_movi_tl(t1
, v1
);
23923 gen_helper_extp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23926 tcg_gen_movi_tl(t0
, v2
);
23927 gen_helper_extp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23930 tcg_gen_movi_tl(t0
, v2
);
23931 tcg_gen_movi_tl(t1
, v1
);
23932 gen_helper_extpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23935 tcg_gen_movi_tl(t0
, v2
);
23936 gen_helper_extpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23939 imm
= (ctx
->opcode
>> 20) & 0x3F;
23940 tcg_gen_movi_tl(t0
, ret
);
23941 tcg_gen_movi_tl(t1
, imm
);
23942 gen_helper_shilo(t0
, t1
, cpu_env
);
23945 tcg_gen_movi_tl(t0
, ret
);
23946 gen_helper_shilo(t0
, v1_t
, cpu_env
);
23949 tcg_gen_movi_tl(t0
, ret
);
23950 gen_helper_mthlip(t0
, v1_t
, cpu_env
);
23953 imm
= (ctx
->opcode
>> 11) & 0x3FF;
23954 tcg_gen_movi_tl(t0
, imm
);
23955 gen_helper_wrdsp(v1_t
, t0
, cpu_env
);
23958 imm
= (ctx
->opcode
>> 16) & 0x03FF;
23959 tcg_gen_movi_tl(t0
, imm
);
23960 gen_helper_rddsp(cpu_gpr
[ret
], t0
, cpu_env
);
23964 #ifdef TARGET_MIPS64
23965 case OPC_DEXTR_W_DSP
:
23969 tcg_gen_movi_tl(t0
, ret
);
23970 gen_helper_dmthlip(v1_t
, t0
, cpu_env
);
23974 int shift
= (ctx
->opcode
>> 19) & 0x7F;
23975 int ac
= (ctx
->opcode
>> 11) & 0x03;
23976 tcg_gen_movi_tl(t0
, shift
);
23977 tcg_gen_movi_tl(t1
, ac
);
23978 gen_helper_dshilo(t0
, t1
, cpu_env
);
23983 int ac
= (ctx
->opcode
>> 11) & 0x03;
23984 tcg_gen_movi_tl(t0
, ac
);
23985 gen_helper_dshilo(v1_t
, t0
, cpu_env
);
23989 tcg_gen_movi_tl(t0
, v2
);
23990 tcg_gen_movi_tl(t1
, v1
);
23992 gen_helper_dextp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
23995 tcg_gen_movi_tl(t0
, v2
);
23996 gen_helper_dextp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
23999 tcg_gen_movi_tl(t0
, v2
);
24000 tcg_gen_movi_tl(t1
, v1
);
24001 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24004 tcg_gen_movi_tl(t0
, v2
);
24005 gen_helper_dextpdp(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24008 tcg_gen_movi_tl(t0
, v2
);
24009 tcg_gen_movi_tl(t1
, v1
);
24010 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24012 case OPC_DEXTR_R_L
:
24013 tcg_gen_movi_tl(t0
, v2
);
24014 tcg_gen_movi_tl(t1
, v1
);
24015 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24017 case OPC_DEXTR_RS_L
:
24018 tcg_gen_movi_tl(t0
, v2
);
24019 tcg_gen_movi_tl(t1
, v1
);
24020 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24023 tcg_gen_movi_tl(t0
, v2
);
24024 tcg_gen_movi_tl(t1
, v1
);
24025 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24027 case OPC_DEXTR_R_W
:
24028 tcg_gen_movi_tl(t0
, v2
);
24029 tcg_gen_movi_tl(t1
, v1
);
24030 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24032 case OPC_DEXTR_RS_W
:
24033 tcg_gen_movi_tl(t0
, v2
);
24034 tcg_gen_movi_tl(t1
, v1
);
24035 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24037 case OPC_DEXTR_S_H
:
24038 tcg_gen_movi_tl(t0
, v2
);
24039 tcg_gen_movi_tl(t1
, v1
);
24040 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24042 case OPC_DEXTRV_S_H
:
24043 tcg_gen_movi_tl(t0
, v2
);
24044 tcg_gen_movi_tl(t1
, v1
);
24045 gen_helper_dextr_s_h(cpu_gpr
[ret
], t0
, t1
, cpu_env
);
24048 tcg_gen_movi_tl(t0
, v2
);
24049 gen_helper_dextr_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24051 case OPC_DEXTRV_R_L
:
24052 tcg_gen_movi_tl(t0
, v2
);
24053 gen_helper_dextr_r_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24055 case OPC_DEXTRV_RS_L
:
24056 tcg_gen_movi_tl(t0
, v2
);
24057 gen_helper_dextr_rs_l(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24060 tcg_gen_movi_tl(t0
, v2
);
24061 gen_helper_dextr_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24063 case OPC_DEXTRV_R_W
:
24064 tcg_gen_movi_tl(t0
, v2
);
24065 gen_helper_dextr_r_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24067 case OPC_DEXTRV_RS_W
:
24068 tcg_gen_movi_tl(t0
, v2
);
24069 gen_helper_dextr_rs_w(cpu_gpr
[ret
], t0
, v1_t
, cpu_env
);
24078 tcg_temp_free(v1_t
);
24079 tcg_temp_free(v2_t
);
24082 /* End MIPSDSP functions. */
24084 static void decode_opc_special_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
24086 int rs
, rt
, rd
, sa
;
24089 rs
= (ctx
->opcode
>> 21) & 0x1f;
24090 rt
= (ctx
->opcode
>> 16) & 0x1f;
24091 rd
= (ctx
->opcode
>> 11) & 0x1f;
24092 sa
= (ctx
->opcode
>> 6) & 0x1f;
24094 op1
= MASK_SPECIAL(ctx
->opcode
);
24100 op2
= MASK_R6_MULDIV(ctx
->opcode
);
24110 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
24113 MIPS_INVAL("special_r6 muldiv");
24114 gen_reserved_instruction(ctx
);
24120 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
24124 if (rt
== 0 && sa
== 1) {
24126 * Major opcode and function field is shared with preR6 MFHI/MTHI.
24127 * We need additionally to check other fields.
24129 gen_cl(ctx
, op1
, rd
, rs
);
24131 gen_reserved_instruction(ctx
);
24135 if (is_uhi(extract32(ctx
->opcode
, 6, 20))) {
24136 gen_helper_do_semihosting(cpu_env
);
24138 if (ctx
->hflags
& MIPS_HFLAG_SBRI
) {
24139 gen_reserved_instruction(ctx
);
24141 generate_exception_end(ctx
, EXCP_DBp
);
24145 #if defined(TARGET_MIPS64)
24148 if (rt
== 0 && sa
== 1) {
24150 * Major opcode and function field is shared with preR6 MFHI/MTHI.
24151 * We need additionally to check other fields.
24153 check_mips_64(ctx
);
24154 gen_cl(ctx
, op1
, rd
, rs
);
24156 gen_reserved_instruction(ctx
);
24164 op2
= MASK_R6_MULDIV(ctx
->opcode
);
24174 check_mips_64(ctx
);
24175 gen_r6_muldiv(ctx
, op2
, rd
, rs
, rt
);
24178 MIPS_INVAL("special_r6 muldiv");
24179 gen_reserved_instruction(ctx
);
24184 default: /* Invalid */
24185 MIPS_INVAL("special_r6");
24186 gen_reserved_instruction(ctx
);
24191 static void decode_opc_special_tx79(CPUMIPSState
*env
, DisasContext
*ctx
)
24193 int rs
= extract32(ctx
->opcode
, 21, 5);
24194 int rt
= extract32(ctx
->opcode
, 16, 5);
24195 int rd
= extract32(ctx
->opcode
, 11, 5);
24196 uint32_t op1
= MASK_SPECIAL(ctx
->opcode
);
24199 case OPC_MOVN
: /* Conditional move */
24201 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
24203 case OPC_MFHI
: /* Move from HI/LO */
24205 gen_HILO(ctx
, op1
, 0, rd
);
24208 case OPC_MTLO
: /* Move to HI/LO */
24209 gen_HILO(ctx
, op1
, 0, rs
);
24213 gen_mul_txx9(ctx
, op1
, rd
, rs
, rt
);
24217 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24219 #if defined(TARGET_MIPS64)
24224 check_insn_opc_user_only(ctx
, INSN_R5900
);
24225 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24229 gen_compute_branch(ctx
, op1
, 4, rs
, 0, 0, 4);
24231 default: /* Invalid */
24232 MIPS_INVAL("special_tx79");
24233 gen_reserved_instruction(ctx
);
24238 static void decode_opc_special_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
24240 int rs
, rt
, rd
, sa
;
24243 rs
= (ctx
->opcode
>> 21) & 0x1f;
24244 rt
= (ctx
->opcode
>> 16) & 0x1f;
24245 rd
= (ctx
->opcode
>> 11) & 0x1f;
24246 sa
= (ctx
->opcode
>> 6) & 0x1f;
24248 op1
= MASK_SPECIAL(ctx
->opcode
);
24250 case OPC_MOVN
: /* Conditional move */
24252 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R1
|
24253 INSN_LOONGSON2E
| INSN_LOONGSON2F
);
24254 gen_cond_move(ctx
, op1
, rd
, rs
, rt
);
24256 case OPC_MFHI
: /* Move from HI/LO */
24258 gen_HILO(ctx
, op1
, rs
& 3, rd
);
24261 case OPC_MTLO
: /* Move to HI/LO */
24262 gen_HILO(ctx
, op1
, rd
& 3, rs
);
24265 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R1
);
24266 if (env
->CP0_Config1
& (1 << CP0C1_FP
)) {
24267 check_cp1_enabled(ctx
);
24268 gen_movci(ctx
, rd
, rs
, (ctx
->opcode
>> 18) & 0x7,
24269 (ctx
->opcode
>> 16) & 1);
24271 generate_exception_err(ctx
, EXCP_CpU
, 1);
24277 check_insn(ctx
, INSN_VR54XX
);
24278 op1
= MASK_MUL_VR54XX(ctx
->opcode
);
24279 gen_mul_vr54xx(ctx
, op1
, rd
, rs
, rt
);
24281 gen_muldiv(ctx
, op1
, rd
& 3, rs
, rt
);
24286 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24288 #if defined(TARGET_MIPS64)
24293 check_insn(ctx
, ISA_MIPS3
);
24294 check_mips_64(ctx
);
24295 gen_muldiv(ctx
, op1
, 0, rs
, rt
);
24299 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
24302 #ifdef MIPS_STRICT_STANDARD
24303 MIPS_INVAL("SPIM");
24304 gen_reserved_instruction(ctx
);
24306 /* Implemented as RI exception for now. */
24307 MIPS_INVAL("spim (unofficial)");
24308 gen_reserved_instruction(ctx
);
24311 default: /* Invalid */
24312 MIPS_INVAL("special_legacy");
24313 gen_reserved_instruction(ctx
);
24318 static void decode_opc_special(CPUMIPSState
*env
, DisasContext
*ctx
)
24320 int rs
, rt
, rd
, sa
;
24323 rs
= (ctx
->opcode
>> 21) & 0x1f;
24324 rt
= (ctx
->opcode
>> 16) & 0x1f;
24325 rd
= (ctx
->opcode
>> 11) & 0x1f;
24326 sa
= (ctx
->opcode
>> 6) & 0x1f;
24328 op1
= MASK_SPECIAL(ctx
->opcode
);
24330 case OPC_SLL
: /* Shift with immediate */
24331 if (sa
== 5 && rd
== 0 &&
24332 rs
== 0 && rt
== 0) { /* PAUSE */
24333 if ((ctx
->insn_flags
& ISA_MIPS_R6
) &&
24334 (ctx
->hflags
& MIPS_HFLAG_BMASK
)) {
24335 gen_reserved_instruction(ctx
);
24341 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24344 switch ((ctx
->opcode
>> 21) & 0x1f) {
24346 /* rotr is decoded as srl on non-R2 CPUs */
24347 if (ctx
->insn_flags
& ISA_MIPS_R2
) {
24352 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24355 gen_reserved_instruction(ctx
);
24363 gen_arith(ctx
, op1
, rd
, rs
, rt
);
24365 case OPC_SLLV
: /* Shifts */
24367 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24370 switch ((ctx
->opcode
>> 6) & 0x1f) {
24372 /* rotrv is decoded as srlv on non-R2 CPUs */
24373 if (ctx
->insn_flags
& ISA_MIPS_R2
) {
24378 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24381 gen_reserved_instruction(ctx
);
24385 case OPC_SLT
: /* Set on less than */
24387 gen_slt(ctx
, op1
, rd
, rs
, rt
);
24389 case OPC_AND
: /* Logic*/
24393 gen_logic(ctx
, op1
, rd
, rs
, rt
);
24396 gen_compute_branch(ctx
, op1
, 4, rs
, rd
, sa
, 4);
24398 case OPC_TGE
: /* Traps */
24404 check_insn(ctx
, ISA_MIPS2
);
24405 gen_trap(ctx
, op1
, rs
, rt
, -1);
24408 /* Pmon entry point, also R4010 selsl */
24409 #ifdef MIPS_STRICT_STANDARD
24410 MIPS_INVAL("PMON / selsl");
24411 gen_reserved_instruction(ctx
);
24413 gen_helper_0e0i(pmon
, sa
);
24417 generate_exception_end(ctx
, EXCP_SYSCALL
);
24420 generate_exception_end(ctx
, EXCP_BREAK
);
24423 check_insn(ctx
, ISA_MIPS2
);
24424 gen_sync(extract32(ctx
->opcode
, 6, 5));
24427 #if defined(TARGET_MIPS64)
24428 /* MIPS64 specific opcodes */
24433 check_insn(ctx
, ISA_MIPS3
);
24434 check_mips_64(ctx
);
24435 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24438 switch ((ctx
->opcode
>> 21) & 0x1f) {
24440 /* drotr is decoded as dsrl on non-R2 CPUs */
24441 if (ctx
->insn_flags
& ISA_MIPS_R2
) {
24446 check_insn(ctx
, ISA_MIPS3
);
24447 check_mips_64(ctx
);
24448 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24451 gen_reserved_instruction(ctx
);
24456 switch ((ctx
->opcode
>> 21) & 0x1f) {
24458 /* drotr32 is decoded as dsrl32 on non-R2 CPUs */
24459 if (ctx
->insn_flags
& ISA_MIPS_R2
) {
24464 check_insn(ctx
, ISA_MIPS3
);
24465 check_mips_64(ctx
);
24466 gen_shift_imm(ctx
, op1
, rd
, rt
, sa
);
24469 gen_reserved_instruction(ctx
);
24477 check_insn(ctx
, ISA_MIPS3
);
24478 check_mips_64(ctx
);
24479 gen_arith(ctx
, op1
, rd
, rs
, rt
);
24483 check_insn(ctx
, ISA_MIPS3
);
24484 check_mips_64(ctx
);
24485 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24488 switch ((ctx
->opcode
>> 6) & 0x1f) {
24490 /* drotrv is decoded as dsrlv on non-R2 CPUs */
24491 if (ctx
->insn_flags
& ISA_MIPS_R2
) {
24496 check_insn(ctx
, ISA_MIPS3
);
24497 check_mips_64(ctx
);
24498 gen_shift(ctx
, op1
, rd
, rs
, rt
);
24501 gen_reserved_instruction(ctx
);
24507 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
24508 decode_opc_special_r6(env
, ctx
);
24509 } else if (ctx
->insn_flags
& INSN_R5900
) {
24510 decode_opc_special_tx79(env
, ctx
);
24512 decode_opc_special_legacy(env
, ctx
);
24518 #if defined(TARGET_MIPS64)
24522 * MMI (MultiMedia Interface) ASE instructions
24523 * ===========================================
24527 * MMI instructions category: data communication
24528 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
24530 * PCPYH PEXCH PEXTLB PINTH PPACB PEXT5 PREVH
24531 * PCPYLD PEXCW PEXTLH PINTEH PPACH PPAC5 PROT3W
24532 * PCPYUD PEXEH PEXTLW PPACW
24541 * Parallel Copy Halfword
24543 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24544 * +-----------+---------+---------+---------+---------+-----------+
24545 * | MMI |0 0 0 0 0| rt | rd | PCPYH | MMI3 |
24546 * +-----------+---------+---------+---------+---------+-----------+
24548 static void gen_mmi_pcpyh(DisasContext
*ctx
)
24550 uint32_t pd
, rt
, rd
;
24553 opcode
= ctx
->opcode
;
24555 pd
= extract32(opcode
, 21, 5);
24556 rt
= extract32(opcode
, 16, 5);
24557 rd
= extract32(opcode
, 11, 5);
24559 if (unlikely(pd
!= 0)) {
24560 gen_reserved_instruction(ctx
);
24561 } else if (rd
== 0) {
24563 } else if (rt
== 0) {
24564 tcg_gen_movi_i64(cpu_gpr
[rd
], 0);
24565 tcg_gen_movi_i64(cpu_gpr_hi
[rd
], 0);
24567 TCGv_i64 t0
= tcg_temp_new();
24568 TCGv_i64 t1
= tcg_temp_new();
24569 uint64_t mask
= (1ULL << 16) - 1;
24571 tcg_gen_andi_i64(t0
, cpu_gpr
[rt
], mask
);
24572 tcg_gen_movi_i64(t1
, 0);
24573 tcg_gen_or_i64(t1
, t0
, t1
);
24574 tcg_gen_shli_i64(t0
, t0
, 16);
24575 tcg_gen_or_i64(t1
, t0
, t1
);
24576 tcg_gen_shli_i64(t0
, t0
, 16);
24577 tcg_gen_or_i64(t1
, t0
, t1
);
24578 tcg_gen_shli_i64(t0
, t0
, 16);
24579 tcg_gen_or_i64(t1
, t0
, t1
);
24581 tcg_gen_mov_i64(cpu_gpr
[rd
], t1
);
24583 tcg_gen_andi_i64(t0
, cpu_gpr_hi
[rt
], mask
);
24584 tcg_gen_movi_i64(t1
, 0);
24585 tcg_gen_or_i64(t1
, t0
, t1
);
24586 tcg_gen_shli_i64(t0
, t0
, 16);
24587 tcg_gen_or_i64(t1
, t0
, t1
);
24588 tcg_gen_shli_i64(t0
, t0
, 16);
24589 tcg_gen_or_i64(t1
, t0
, t1
);
24590 tcg_gen_shli_i64(t0
, t0
, 16);
24591 tcg_gen_or_i64(t1
, t0
, t1
);
24593 tcg_gen_mov_i64(cpu_gpr_hi
[rd
], t1
);
24601 * PCPYLD rd, rs, rt
24603 * Parallel Copy Lower Doubleword
24605 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24606 * +-----------+---------+---------+---------+---------+-----------+
24607 * | MMI | rs | rt | rd | PCPYLD | MMI2 |
24608 * +-----------+---------+---------+---------+---------+-----------+
24610 static void gen_mmi_pcpyld(DisasContext
*ctx
)
24612 uint32_t rs
, rt
, rd
;
24615 opcode
= ctx
->opcode
;
24617 rs
= extract32(opcode
, 21, 5);
24618 rt
= extract32(opcode
, 16, 5);
24619 rd
= extract32(opcode
, 11, 5);
24625 tcg_gen_movi_i64(cpu_gpr_hi
[rd
], 0);
24627 tcg_gen_mov_i64(cpu_gpr_hi
[rd
], cpu_gpr
[rs
]);
24630 tcg_gen_movi_i64(cpu_gpr
[rd
], 0);
24633 tcg_gen_mov_i64(cpu_gpr
[rd
], cpu_gpr
[rt
]);
24640 * PCPYUD rd, rs, rt
24642 * Parallel Copy Upper Doubleword
24644 * 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
24645 * +-----------+---------+---------+---------+---------+-----------+
24646 * | MMI | rs | rt | rd | PCPYUD | MMI3 |
24647 * +-----------+---------+---------+---------+---------+-----------+
24649 static void gen_mmi_pcpyud(DisasContext
*ctx
)
24651 uint32_t rs
, rt
, rd
;
24654 opcode
= ctx
->opcode
;
24656 rs
= extract32(opcode
, 21, 5);
24657 rt
= extract32(opcode
, 16, 5);
24658 rd
= extract32(opcode
, 11, 5);
24664 tcg_gen_movi_i64(cpu_gpr
[rd
], 0);
24666 tcg_gen_mov_i64(cpu_gpr
[rd
], cpu_gpr_hi
[rs
]);
24669 tcg_gen_movi_i64(cpu_gpr_hi
[rd
], 0);
24672 tcg_gen_mov_i64(cpu_gpr_hi
[rd
], cpu_gpr_hi
[rt
]);
24681 #if !defined(TARGET_MIPS64)
24683 /* MXU accumulate add/subtract 1-bit pattern 'aptn1' */
24684 #define MXU_APTN1_A 0
24685 #define MXU_APTN1_S 1
24687 /* MXU accumulate add/subtract 2-bit pattern 'aptn2' */
24688 #define MXU_APTN2_AA 0
24689 #define MXU_APTN2_AS 1
24690 #define MXU_APTN2_SA 2
24691 #define MXU_APTN2_SS 3
24693 /* MXU execute add/subtract 2-bit pattern 'eptn2' */
24694 #define MXU_EPTN2_AA 0
24695 #define MXU_EPTN2_AS 1
24696 #define MXU_EPTN2_SA 2
24697 #define MXU_EPTN2_SS 3
24699 /* MXU operand getting pattern 'optn2' */
24700 #define MXU_OPTN2_PTN0 0
24701 #define MXU_OPTN2_PTN1 1
24702 #define MXU_OPTN2_PTN2 2
24703 #define MXU_OPTN2_PTN3 3
24704 /* alternative naming scheme for 'optn2' */
24705 #define MXU_OPTN2_WW 0
24706 #define MXU_OPTN2_LW 1
24707 #define MXU_OPTN2_HW 2
24708 #define MXU_OPTN2_XW 3
24710 /* MXU operand getting pattern 'optn3' */
24711 #define MXU_OPTN3_PTN0 0
24712 #define MXU_OPTN3_PTN1 1
24713 #define MXU_OPTN3_PTN2 2
24714 #define MXU_OPTN3_PTN3 3
24715 #define MXU_OPTN3_PTN4 4
24716 #define MXU_OPTN3_PTN5 5
24717 #define MXU_OPTN3_PTN6 6
24718 #define MXU_OPTN3_PTN7 7
24722 * S32I2M XRa, rb - Register move from GRF to XRF
24724 static void gen_mxu_s32i2m(DisasContext
*ctx
)
24729 t0
= tcg_temp_new();
24731 XRa
= extract32(ctx
->opcode
, 6, 5);
24732 Rb
= extract32(ctx
->opcode
, 16, 5);
24734 gen_load_gpr(t0
, Rb
);
24736 gen_store_mxu_gpr(t0
, XRa
);
24737 } else if (XRa
== 16) {
24738 gen_store_mxu_cr(t0
);
24745 * S32M2I XRa, rb - Register move from XRF to GRF
24747 static void gen_mxu_s32m2i(DisasContext
*ctx
)
24752 t0
= tcg_temp_new();
24754 XRa
= extract32(ctx
->opcode
, 6, 5);
24755 Rb
= extract32(ctx
->opcode
, 16, 5);
24758 gen_load_mxu_gpr(t0
, XRa
);
24759 } else if (XRa
== 16) {
24760 gen_load_mxu_cr(t0
);
24763 gen_store_gpr(t0
, Rb
);
24769 * S8LDD XRa, Rb, s8, optn3 - Load a byte from memory to XRF
24771 static void gen_mxu_s8ldd(DisasContext
*ctx
)
24774 uint32_t XRa
, Rb
, s8
, optn3
;
24776 t0
= tcg_temp_new();
24777 t1
= tcg_temp_new();
24779 XRa
= extract32(ctx
->opcode
, 6, 4);
24780 s8
= extract32(ctx
->opcode
, 10, 8);
24781 optn3
= extract32(ctx
->opcode
, 18, 3);
24782 Rb
= extract32(ctx
->opcode
, 21, 5);
24784 gen_load_gpr(t0
, Rb
);
24785 tcg_gen_addi_tl(t0
, t0
, (int8_t)s8
);
24788 /* XRa[7:0] = tmp8 */
24789 case MXU_OPTN3_PTN0
:
24790 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24791 gen_load_mxu_gpr(t0
, XRa
);
24792 tcg_gen_deposit_tl(t0
, t0
, t1
, 0, 8);
24794 /* XRa[15:8] = tmp8 */
24795 case MXU_OPTN3_PTN1
:
24796 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24797 gen_load_mxu_gpr(t0
, XRa
);
24798 tcg_gen_deposit_tl(t0
, t0
, t1
, 8, 8);
24800 /* XRa[23:16] = tmp8 */
24801 case MXU_OPTN3_PTN2
:
24802 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24803 gen_load_mxu_gpr(t0
, XRa
);
24804 tcg_gen_deposit_tl(t0
, t0
, t1
, 16, 8);
24806 /* XRa[31:24] = tmp8 */
24807 case MXU_OPTN3_PTN3
:
24808 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24809 gen_load_mxu_gpr(t0
, XRa
);
24810 tcg_gen_deposit_tl(t0
, t0
, t1
, 24, 8);
24812 /* XRa = {8'b0, tmp8, 8'b0, tmp8} */
24813 case MXU_OPTN3_PTN4
:
24814 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24815 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24817 /* XRa = {tmp8, 8'b0, tmp8, 8'b0} */
24818 case MXU_OPTN3_PTN5
:
24819 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24820 tcg_gen_shli_tl(t1
, t1
, 8);
24821 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24823 /* XRa = {{8{sign of tmp8}}, tmp8, {8{sign of tmp8}}, tmp8} */
24824 case MXU_OPTN3_PTN6
:
24825 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_SB
);
24826 tcg_gen_mov_tl(t0
, t1
);
24827 tcg_gen_andi_tl(t0
, t0
, 0xFF00FFFF);
24828 tcg_gen_shli_tl(t1
, t1
, 16);
24829 tcg_gen_or_tl(t0
, t0
, t1
);
24831 /* XRa = {tmp8, tmp8, tmp8, tmp8} */
24832 case MXU_OPTN3_PTN7
:
24833 tcg_gen_qemu_ld_tl(t1
, t0
, ctx
->mem_idx
, MO_UB
);
24834 tcg_gen_deposit_tl(t1
, t1
, t1
, 8, 8);
24835 tcg_gen_deposit_tl(t0
, t1
, t1
, 16, 16);
24839 gen_store_mxu_gpr(t0
, XRa
);
24846 * D16MUL XRa, XRb, XRc, XRd, optn2 - Signed 16 bit pattern multiplication
24848 static void gen_mxu_d16mul(DisasContext
*ctx
)
24850 TCGv t0
, t1
, t2
, t3
;
24851 uint32_t XRa
, XRb
, XRc
, XRd
, optn2
;
24853 t0
= tcg_temp_new();
24854 t1
= tcg_temp_new();
24855 t2
= tcg_temp_new();
24856 t3
= tcg_temp_new();
24858 XRa
= extract32(ctx
->opcode
, 6, 4);
24859 XRb
= extract32(ctx
->opcode
, 10, 4);
24860 XRc
= extract32(ctx
->opcode
, 14, 4);
24861 XRd
= extract32(ctx
->opcode
, 18, 4);
24862 optn2
= extract32(ctx
->opcode
, 22, 2);
24864 gen_load_mxu_gpr(t1
, XRb
);
24865 tcg_gen_sextract_tl(t0
, t1
, 0, 16);
24866 tcg_gen_sextract_tl(t1
, t1
, 16, 16);
24867 gen_load_mxu_gpr(t3
, XRc
);
24868 tcg_gen_sextract_tl(t2
, t3
, 0, 16);
24869 tcg_gen_sextract_tl(t3
, t3
, 16, 16);
24872 case MXU_OPTN2_WW
: /* XRB.H*XRC.H == lop, XRB.L*XRC.L == rop */
24873 tcg_gen_mul_tl(t3
, t1
, t3
);
24874 tcg_gen_mul_tl(t2
, t0
, t2
);
24876 case MXU_OPTN2_LW
: /* XRB.L*XRC.H == lop, XRB.L*XRC.L == rop */
24877 tcg_gen_mul_tl(t3
, t0
, t3
);
24878 tcg_gen_mul_tl(t2
, t0
, t2
);
24880 case MXU_OPTN2_HW
: /* XRB.H*XRC.H == lop, XRB.H*XRC.L == rop */
24881 tcg_gen_mul_tl(t3
, t1
, t3
);
24882 tcg_gen_mul_tl(t2
, t1
, t2
);
24884 case MXU_OPTN2_XW
: /* XRB.L*XRC.H == lop, XRB.H*XRC.L == rop */
24885 tcg_gen_mul_tl(t3
, t0
, t3
);
24886 tcg_gen_mul_tl(t2
, t1
, t2
);
24889 gen_store_mxu_gpr(t3
, XRa
);
24890 gen_store_mxu_gpr(t2
, XRd
);
24899 * D16MAC XRa, XRb, XRc, XRd, aptn2, optn2 - Signed 16 bit pattern multiply
24902 static void gen_mxu_d16mac(DisasContext
*ctx
)
24904 TCGv t0
, t1
, t2
, t3
;
24905 uint32_t XRa
, XRb
, XRc
, XRd
, optn2
, aptn2
;
24907 t0
= tcg_temp_new();
24908 t1
= tcg_temp_new();
24909 t2
= tcg_temp_new();
24910 t3
= tcg_temp_new();
24912 XRa
= extract32(ctx
->opcode
, 6, 4);
24913 XRb
= extract32(ctx
->opcode
, 10, 4);
24914 XRc
= extract32(ctx
->opcode
, 14, 4);
24915 XRd
= extract32(ctx
->opcode
, 18, 4);
24916 optn2
= extract32(ctx
->opcode
, 22, 2);
24917 aptn2
= extract32(ctx
->opcode
, 24, 2);
24919 gen_load_mxu_gpr(t1
, XRb
);
24920 tcg_gen_sextract_tl(t0
, t1
, 0, 16);
24921 tcg_gen_sextract_tl(t1
, t1
, 16, 16);
24923 gen_load_mxu_gpr(t3
, XRc
);
24924 tcg_gen_sextract_tl(t2
, t3
, 0, 16);
24925 tcg_gen_sextract_tl(t3
, t3
, 16, 16);
24928 case MXU_OPTN2_WW
: /* XRB.H*XRC.H == lop, XRB.L*XRC.L == rop */
24929 tcg_gen_mul_tl(t3
, t1
, t3
);
24930 tcg_gen_mul_tl(t2
, t0
, t2
);
24932 case MXU_OPTN2_LW
: /* XRB.L*XRC.H == lop, XRB.L*XRC.L == rop */
24933 tcg_gen_mul_tl(t3
, t0
, t3
);
24934 tcg_gen_mul_tl(t2
, t0
, t2
);
24936 case MXU_OPTN2_HW
: /* XRB.H*XRC.H == lop, XRB.H*XRC.L == rop */
24937 tcg_gen_mul_tl(t3
, t1
, t3
);
24938 tcg_gen_mul_tl(t2
, t1
, t2
);
24940 case MXU_OPTN2_XW
: /* XRB.L*XRC.H == lop, XRB.H*XRC.L == rop */
24941 tcg_gen_mul_tl(t3
, t0
, t3
);
24942 tcg_gen_mul_tl(t2
, t1
, t2
);
24945 gen_load_mxu_gpr(t0
, XRa
);
24946 gen_load_mxu_gpr(t1
, XRd
);
24950 tcg_gen_add_tl(t3
, t0
, t3
);
24951 tcg_gen_add_tl(t2
, t1
, t2
);
24954 tcg_gen_add_tl(t3
, t0
, t3
);
24955 tcg_gen_sub_tl(t2
, t1
, t2
);
24958 tcg_gen_sub_tl(t3
, t0
, t3
);
24959 tcg_gen_add_tl(t2
, t1
, t2
);
24962 tcg_gen_sub_tl(t3
, t0
, t3
);
24963 tcg_gen_sub_tl(t2
, t1
, t2
);
24966 gen_store_mxu_gpr(t3
, XRa
);
24967 gen_store_mxu_gpr(t2
, XRd
);
24976 * Q8MUL XRa, XRb, XRc, XRd - Parallel unsigned 8 bit pattern multiply
24977 * Q8MULSU XRa, XRb, XRc, XRd - Parallel signed 8 bit pattern multiply
24979 static void gen_mxu_q8mul_q8mulsu(DisasContext
*ctx
)
24981 TCGv t0
, t1
, t2
, t3
, t4
, t5
, t6
, t7
;
24982 uint32_t XRa
, XRb
, XRc
, XRd
, sel
;
24984 t0
= tcg_temp_new();
24985 t1
= tcg_temp_new();
24986 t2
= tcg_temp_new();
24987 t3
= tcg_temp_new();
24988 t4
= tcg_temp_new();
24989 t5
= tcg_temp_new();
24990 t6
= tcg_temp_new();
24991 t7
= tcg_temp_new();
24993 XRa
= extract32(ctx
->opcode
, 6, 4);
24994 XRb
= extract32(ctx
->opcode
, 10, 4);
24995 XRc
= extract32(ctx
->opcode
, 14, 4);
24996 XRd
= extract32(ctx
->opcode
, 18, 4);
24997 sel
= extract32(ctx
->opcode
, 22, 2);
24999 gen_load_mxu_gpr(t3
, XRb
);
25000 gen_load_mxu_gpr(t7
, XRc
);
25004 tcg_gen_ext8s_tl(t0
, t3
);
25005 tcg_gen_shri_tl(t3
, t3
, 8);
25006 tcg_gen_ext8s_tl(t1
, t3
);
25007 tcg_gen_shri_tl(t3
, t3
, 8);
25008 tcg_gen_ext8s_tl(t2
, t3
);
25009 tcg_gen_shri_tl(t3
, t3
, 8);
25010 tcg_gen_ext8s_tl(t3
, t3
);
25013 tcg_gen_ext8u_tl(t0
, t3
);
25014 tcg_gen_shri_tl(t3
, t3
, 8);
25015 tcg_gen_ext8u_tl(t1
, t3
);
25016 tcg_gen_shri_tl(t3
, t3
, 8);
25017 tcg_gen_ext8u_tl(t2
, t3
);
25018 tcg_gen_shri_tl(t3
, t3
, 8);
25019 tcg_gen_ext8u_tl(t3
, t3
);
25022 tcg_gen_ext8u_tl(t4
, t7
);
25023 tcg_gen_shri_tl(t7
, t7
, 8);
25024 tcg_gen_ext8u_tl(t5
, t7
);
25025 tcg_gen_shri_tl(t7
, t7
, 8);
25026 tcg_gen_ext8u_tl(t6
, t7
);
25027 tcg_gen_shri_tl(t7
, t7
, 8);
25028 tcg_gen_ext8u_tl(t7
, t7
);
25030 tcg_gen_mul_tl(t0
, t0
, t4
);
25031 tcg_gen_mul_tl(t1
, t1
, t5
);
25032 tcg_gen_mul_tl(t2
, t2
, t6
);
25033 tcg_gen_mul_tl(t3
, t3
, t7
);
25035 tcg_gen_andi_tl(t0
, t0
, 0xFFFF);
25036 tcg_gen_andi_tl(t1
, t1
, 0xFFFF);
25037 tcg_gen_andi_tl(t2
, t2
, 0xFFFF);
25038 tcg_gen_andi_tl(t3
, t3
, 0xFFFF);
25040 tcg_gen_shli_tl(t1
, t1
, 16);
25041 tcg_gen_shli_tl(t3
, t3
, 16);
25043 tcg_gen_or_tl(t0
, t0
, t1
);
25044 tcg_gen_or_tl(t1
, t2
, t3
);
25046 gen_store_mxu_gpr(t0
, XRd
);
25047 gen_store_mxu_gpr(t1
, XRa
);
25060 * S32LDD XRa, Rb, S12 - Load a word from memory to XRF
25061 * S32LDDR XRa, Rb, S12 - Load a word from memory to XRF, reversed byte seq.
25063 static void gen_mxu_s32ldd_s32lddr(DisasContext
*ctx
)
25066 uint32_t XRa
, Rb
, s12
, sel
;
25068 t0
= tcg_temp_new();
25069 t1
= tcg_temp_new();
25071 XRa
= extract32(ctx
->opcode
, 6, 4);
25072 s12
= extract32(ctx
->opcode
, 10, 10);
25073 sel
= extract32(ctx
->opcode
, 20, 1);
25074 Rb
= extract32(ctx
->opcode
, 21, 5);
25076 gen_load_gpr(t0
, Rb
);
25078 tcg_gen_movi_tl(t1
, s12
);
25079 tcg_gen_shli_tl(t1
, t1
, 2);
25081 tcg_gen_ori_tl(t1
, t1
, 0xFFFFF000);
25083 tcg_gen_add_tl(t1
, t0
, t1
);
25084 tcg_gen_qemu_ld_tl(t1
, t1
, ctx
->mem_idx
, MO_SL
);
25088 tcg_gen_bswap32_tl(t1
, t1
);
25090 gen_store_mxu_gpr(t1
, XRa
);
25098 * MXU instruction category: logic
25099 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
25101 * S32NOR S32AND S32OR S32XOR
25105 * S32NOR XRa, XRb, XRc
25106 * Update XRa with the result of logical bitwise 'nor' operation
25107 * applied to the content of XRb and XRc.
25109 static void gen_mxu_S32NOR(DisasContext
*ctx
)
25111 uint32_t pad
, XRc
, XRb
, XRa
;
25113 pad
= extract32(ctx
->opcode
, 21, 5);
25114 XRc
= extract32(ctx
->opcode
, 14, 4);
25115 XRb
= extract32(ctx
->opcode
, 10, 4);
25116 XRa
= extract32(ctx
->opcode
, 6, 4);
25118 if (unlikely(pad
!= 0)) {
25119 /* opcode padding incorrect -> do nothing */
25120 } else if (unlikely(XRa
== 0)) {
25121 /* destination is zero register -> do nothing */
25122 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25123 /* both operands zero registers -> just set destination to all 1s */
25124 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0xFFFFFFFF);
25125 } else if (unlikely(XRb
== 0)) {
25126 /* XRb zero register -> just set destination to the negation of XRc */
25127 tcg_gen_not_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25128 } else if (unlikely(XRc
== 0)) {
25129 /* XRa zero register -> just set destination to the negation of XRb */
25130 tcg_gen_not_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25131 } else if (unlikely(XRb
== XRc
)) {
25132 /* both operands same -> just set destination to the negation of XRb */
25133 tcg_gen_not_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25135 /* the most general case */
25136 tcg_gen_nor_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
25141 * S32AND XRa, XRb, XRc
25142 * Update XRa with the result of logical bitwise 'and' operation
25143 * applied to the content of XRb and XRc.
25145 static void gen_mxu_S32AND(DisasContext
*ctx
)
25147 uint32_t pad
, XRc
, XRb
, XRa
;
25149 pad
= extract32(ctx
->opcode
, 21, 5);
25150 XRc
= extract32(ctx
->opcode
, 14, 4);
25151 XRb
= extract32(ctx
->opcode
, 10, 4);
25152 XRa
= extract32(ctx
->opcode
, 6, 4);
25154 if (unlikely(pad
!= 0)) {
25155 /* opcode padding incorrect -> do nothing */
25156 } else if (unlikely(XRa
== 0)) {
25157 /* destination is zero register -> do nothing */
25158 } else if (unlikely((XRb
== 0) || (XRc
== 0))) {
25159 /* one of operands zero register -> just set destination to all 0s */
25160 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25161 } else if (unlikely(XRb
== XRc
)) {
25162 /* both operands same -> just set destination to one of them */
25163 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25165 /* the most general case */
25166 tcg_gen_and_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
25171 * S32OR XRa, XRb, XRc
25172 * Update XRa with the result of logical bitwise 'or' operation
25173 * applied to the content of XRb and XRc.
25175 static void gen_mxu_S32OR(DisasContext
*ctx
)
25177 uint32_t pad
, XRc
, XRb
, XRa
;
25179 pad
= extract32(ctx
->opcode
, 21, 5);
25180 XRc
= extract32(ctx
->opcode
, 14, 4);
25181 XRb
= extract32(ctx
->opcode
, 10, 4);
25182 XRa
= extract32(ctx
->opcode
, 6, 4);
25184 if (unlikely(pad
!= 0)) {
25185 /* opcode padding incorrect -> do nothing */
25186 } else if (unlikely(XRa
== 0)) {
25187 /* destination is zero register -> do nothing */
25188 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25189 /* both operands zero registers -> just set destination to all 0s */
25190 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25191 } else if (unlikely(XRb
== 0)) {
25192 /* XRb zero register -> just set destination to the content of XRc */
25193 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25194 } else if (unlikely(XRc
== 0)) {
25195 /* XRc zero register -> just set destination to the content of XRb */
25196 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25197 } else if (unlikely(XRb
== XRc
)) {
25198 /* both operands same -> just set destination to one of them */
25199 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25201 /* the most general case */
25202 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
25207 * S32XOR XRa, XRb, XRc
25208 * Update XRa with the result of logical bitwise 'xor' operation
25209 * applied to the content of XRb and XRc.
25211 static void gen_mxu_S32XOR(DisasContext
*ctx
)
25213 uint32_t pad
, XRc
, XRb
, XRa
;
25215 pad
= extract32(ctx
->opcode
, 21, 5);
25216 XRc
= extract32(ctx
->opcode
, 14, 4);
25217 XRb
= extract32(ctx
->opcode
, 10, 4);
25218 XRa
= extract32(ctx
->opcode
, 6, 4);
25220 if (unlikely(pad
!= 0)) {
25221 /* opcode padding incorrect -> do nothing */
25222 } else if (unlikely(XRa
== 0)) {
25223 /* destination is zero register -> do nothing */
25224 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25225 /* both operands zero registers -> just set destination to all 0s */
25226 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25227 } else if (unlikely(XRb
== 0)) {
25228 /* XRb zero register -> just set destination to the content of XRc */
25229 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25230 } else if (unlikely(XRc
== 0)) {
25231 /* XRc zero register -> just set destination to the content of XRb */
25232 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25233 } else if (unlikely(XRb
== XRc
)) {
25234 /* both operands same -> just set destination to all 0s */
25235 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25237 /* the most general case */
25238 tcg_gen_xor_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], mxu_gpr
[XRc
- 1]);
25244 * MXU instruction category max/min
25245 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
25247 * S32MAX D16MAX Q8MAX
25248 * S32MIN D16MIN Q8MIN
25252 * S32MAX XRa, XRb, XRc
25253 * Update XRa with the maximum of signed 32-bit integers contained
25256 * S32MIN XRa, XRb, XRc
25257 * Update XRa with the minimum of signed 32-bit integers contained
25260 static void gen_mxu_S32MAX_S32MIN(DisasContext
*ctx
)
25262 uint32_t pad
, opc
, XRc
, XRb
, XRa
;
25264 pad
= extract32(ctx
->opcode
, 21, 5);
25265 opc
= extract32(ctx
->opcode
, 18, 3);
25266 XRc
= extract32(ctx
->opcode
, 14, 4);
25267 XRb
= extract32(ctx
->opcode
, 10, 4);
25268 XRa
= extract32(ctx
->opcode
, 6, 4);
25270 if (unlikely(pad
!= 0)) {
25271 /* opcode padding incorrect -> do nothing */
25272 } else if (unlikely(XRa
== 0)) {
25273 /* destination is zero register -> do nothing */
25274 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25275 /* both operands zero registers -> just set destination to zero */
25276 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25277 } else if (unlikely((XRb
== 0) || (XRc
== 0))) {
25278 /* exactly one operand is zero register - find which one is not...*/
25279 uint32_t XRx
= XRb
? XRb
: XRc
;
25280 /* ...and do max/min operation with one operand 0 */
25281 if (opc
== OPC_MXU_S32MAX
) {
25282 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRx
- 1], 0);
25284 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRx
- 1], 0);
25286 } else if (unlikely(XRb
== XRc
)) {
25287 /* both operands same -> just set destination to one of them */
25288 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25290 /* the most general case */
25291 if (opc
== OPC_MXU_S32MAX
) {
25292 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1],
25295 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1],
25303 * Update XRa with the 16-bit-wise maximums of signed integers
25304 * contained in XRb and XRc.
25307 * Update XRa with the 16-bit-wise minimums of signed integers
25308 * contained in XRb and XRc.
25310 static void gen_mxu_D16MAX_D16MIN(DisasContext
*ctx
)
25312 uint32_t pad
, opc
, XRc
, XRb
, XRa
;
25314 pad
= extract32(ctx
->opcode
, 21, 5);
25315 opc
= extract32(ctx
->opcode
, 18, 3);
25316 XRc
= extract32(ctx
->opcode
, 14, 4);
25317 XRb
= extract32(ctx
->opcode
, 10, 4);
25318 XRa
= extract32(ctx
->opcode
, 6, 4);
25320 if (unlikely(pad
!= 0)) {
25321 /* opcode padding incorrect -> do nothing */
25322 } else if (unlikely(XRc
== 0)) {
25323 /* destination is zero register -> do nothing */
25324 } else if (unlikely((XRb
== 0) && (XRa
== 0))) {
25325 /* both operands zero registers -> just set destination to zero */
25326 tcg_gen_movi_i32(mxu_gpr
[XRc
- 1], 0);
25327 } else if (unlikely((XRb
== 0) || (XRa
== 0))) {
25328 /* exactly one operand is zero register - find which one is not...*/
25329 uint32_t XRx
= XRb
? XRb
: XRc
;
25330 /* ...and do half-word-wise max/min with one operand 0 */
25331 TCGv_i32 t0
= tcg_temp_new();
25332 TCGv_i32 t1
= tcg_const_i32(0);
25334 /* the left half-word first */
25335 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0xFFFF0000);
25336 if (opc
== OPC_MXU_D16MAX
) {
25337 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25339 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25342 /* the right half-word */
25343 tcg_gen_andi_i32(t0
, mxu_gpr
[XRx
- 1], 0x0000FFFF);
25344 /* move half-words to the leftmost position */
25345 tcg_gen_shli_i32(t0
, t0
, 16);
25346 /* t0 will be max/min of t0 and t1 */
25347 if (opc
== OPC_MXU_D16MAX
) {
25348 tcg_gen_smax_i32(t0
, t0
, t1
);
25350 tcg_gen_smin_i32(t0
, t0
, t1
);
25352 /* return resulting half-words to its original position */
25353 tcg_gen_shri_i32(t0
, t0
, 16);
25354 /* finally update the destination */
25355 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
25359 } else if (unlikely(XRb
== XRc
)) {
25360 /* both operands same -> just set destination to one of them */
25361 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25363 /* the most general case */
25364 TCGv_i32 t0
= tcg_temp_new();
25365 TCGv_i32 t1
= tcg_temp_new();
25367 /* the left half-word first */
25368 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0xFFFF0000);
25369 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFFFF0000);
25370 if (opc
== OPC_MXU_D16MAX
) {
25371 tcg_gen_smax_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25373 tcg_gen_smin_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25376 /* the right half-word */
25377 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x0000FFFF);
25378 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0x0000FFFF);
25379 /* move half-words to the leftmost position */
25380 tcg_gen_shli_i32(t0
, t0
, 16);
25381 tcg_gen_shli_i32(t1
, t1
, 16);
25382 /* t0 will be max/min of t0 and t1 */
25383 if (opc
== OPC_MXU_D16MAX
) {
25384 tcg_gen_smax_i32(t0
, t0
, t1
);
25386 tcg_gen_smin_i32(t0
, t0
, t1
);
25388 /* return resulting half-words to its original position */
25389 tcg_gen_shri_i32(t0
, t0
, 16);
25390 /* finally update the destination */
25391 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRa
- 1], t0
);
25400 * Update XRa with the 8-bit-wise maximums of signed integers
25401 * contained in XRb and XRc.
25404 * Update XRa with the 8-bit-wise minimums of signed integers
25405 * contained in XRb and XRc.
/*
 *  Q8MAX XRa, XRb, XRc
 *    Update XRa with the 8-bit-wise maximums of signed integers
 *    contained in XRb and XRc.
 *
 *  Q8MIN XRa, XRb, XRc
 *    Update XRa with the 8-bit-wise minimums of signed integers
 *    contained in XRb and XRc.
 *
 *  Each byte lane is isolated, shifted into bit 31 so the sign of the
 *  byte becomes the sign of the 32-bit value, compared, then shifted
 *  back and OR-ed into the destination.
 */
static void gen_mxu_Q8MAX_Q8MIN(DisasContext *ctx)
{
    uint32_t pad, opc, XRc, XRb, XRa;

    /* Decode the instruction fields from the raw opcode word. */
    pad = extract32(ctx->opcode, 21, 5);
    opc = extract32(ctx->opcode, 18, 3);
    XRc = extract32(ctx->opcode, 14, 4);
    XRb = extract32(ctx->opcode, 10, 4);
    XRa = extract32(ctx->opcode,  6, 4);

    if (unlikely(pad != 0)) {
        /* opcode padding incorrect -> do nothing */
    } else if (unlikely(XRa == 0)) {
        /* destination is zero register -> do nothing */
    } else if (unlikely((XRb == 0) && (XRc == 0))) {
        /* both operands zero registers -> just set destination to zero */
        tcg_gen_movi_i32(mxu_gpr[XRa - 1], 0);
    } else if (unlikely((XRb == 0) || (XRc == 0))) {
        /* exactly one operand is zero register - make it be the first...*/
        uint32_t XRx = XRb ? XRb : XRc;
        /* ...and do byte-wise max/min with one operand 0 */
        TCGv_i32 t0 = tcg_temp_new();
        TCGv_i32 t1 = tcg_const_i32(0);
        int32_t i;

        /* the leftmost byte (byte 3) first */
        tcg_gen_andi_i32(t0, mxu_gpr[XRx - 1], 0xFF000000);
        if (opc == OPC_MXU_Q8MAX) {
            tcg_gen_smax_i32(mxu_gpr[XRa - 1], t0, t1);
        } else {
            tcg_gen_smin_i32(mxu_gpr[XRa - 1], t0, t1);
        }

        /* bytes 2, 1, 0 */
        for (i = 2; i >= 0; i--) {
            /* extract the byte */
            tcg_gen_andi_i32(t0, mxu_gpr[XRx - 1], 0xFF << (8 * i));
            /* move the byte to the leftmost position */
            tcg_gen_shli_i32(t0, t0, 8 * (3 - i));
            /* t0 will be max/min of t0 and t1 */
            if (opc == OPC_MXU_Q8MAX) {
                tcg_gen_smax_i32(t0, t0, t1);
            } else {
                tcg_gen_smin_i32(t0, t0, t1);
            }
            /* return resulting byte to its original position */
            tcg_gen_shri_i32(t0, t0, 8 * (3 - i));
            /* finally update the destination */
            tcg_gen_or_i32(mxu_gpr[XRa - 1], mxu_gpr[XRa - 1], t0);
        }

        tcg_temp_free(t1);
        tcg_temp_free(t0);
    } else if (unlikely(XRb == XRc)) {
        /* both operands same -> just set destination to one of them */
        tcg_gen_mov_i32(mxu_gpr[XRa - 1], mxu_gpr[XRb - 1]);
    } else {
        /*
         * The most general case.
         * NOTE(review): as in D16MAX/D16MIN, if XRa aliases XRb or XRc
         * the byte-3 write clobbers that operand before the remaining
         * lanes are read — confirm whether such encodings can occur.
         */
        TCGv_i32 t0 = tcg_temp_new();
        TCGv_i32 t1 = tcg_temp_new();
        int32_t i;

        /* the leftmost bytes (bytes 3) first */
        tcg_gen_andi_i32(t0, mxu_gpr[XRb - 1], 0xFF000000);
        tcg_gen_andi_i32(t1, mxu_gpr[XRc - 1], 0xFF000000);
        if (opc == OPC_MXU_Q8MAX) {
            tcg_gen_smax_i32(mxu_gpr[XRa - 1], t0, t1);
        } else {
            tcg_gen_smin_i32(mxu_gpr[XRa - 1], t0, t1);
        }

        /* bytes 2, 1, 0 */
        for (i = 2; i >= 0; i--) {
            /* extract corresponding bytes */
            tcg_gen_andi_i32(t0, mxu_gpr[XRb - 1], 0xFF << (8 * i));
            tcg_gen_andi_i32(t1, mxu_gpr[XRc - 1], 0xFF << (8 * i));
            /* move the bytes to the leftmost position */
            tcg_gen_shli_i32(t0, t0, 8 * (3 - i));
            tcg_gen_shli_i32(t1, t1, 8 * (3 - i));
            /* t0 will be max/min of t0 and t1 */
            if (opc == OPC_MXU_Q8MAX) {
                tcg_gen_smax_i32(t0, t0, t1);
            } else {
                tcg_gen_smin_i32(t0, t0, t1);
            }
            /* return resulting byte to its original position */
            tcg_gen_shri_i32(t0, t0, 8 * (3 - i));
            /* finally update the destination */
            tcg_gen_or_i32(mxu_gpr[XRa - 1], mxu_gpr[XRa - 1], t0);
        }

        tcg_temp_free(t1);
        tcg_temp_free(t0);
    }
}
25505 * MXU instruction category: align
25506 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
25512 * S32ALNI XRc, XRb, XRa, optn3
25513 * Arrange bytes from XRb and XRc according to one of five sets of
25514 * rules determined by optn3, and place the result in XRa.
25516 static void gen_mxu_S32ALNI(DisasContext
*ctx
)
25518 uint32_t optn3
, pad
, XRc
, XRb
, XRa
;
25520 optn3
= extract32(ctx
->opcode
, 23, 3);
25521 pad
= extract32(ctx
->opcode
, 21, 2);
25522 XRc
= extract32(ctx
->opcode
, 14, 4);
25523 XRb
= extract32(ctx
->opcode
, 10, 4);
25524 XRa
= extract32(ctx
->opcode
, 6, 4);
25526 if (unlikely(pad
!= 0)) {
25527 /* opcode padding incorrect -> do nothing */
25528 } else if (unlikely(XRa
== 0)) {
25529 /* destination is zero register -> do nothing */
25530 } else if (unlikely((XRb
== 0) && (XRc
== 0))) {
25531 /* both operands zero registers -> just set destination to all 0s */
25532 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25533 } else if (unlikely(XRb
== 0)) {
25534 /* XRb zero register -> just appropriatelly shift XRc into XRa */
25536 case MXU_OPTN3_PTN0
:
25537 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25539 case MXU_OPTN3_PTN1
:
25540 case MXU_OPTN3_PTN2
:
25541 case MXU_OPTN3_PTN3
:
25542 tcg_gen_shri_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1],
25545 case MXU_OPTN3_PTN4
:
25546 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25549 } else if (unlikely(XRc
== 0)) {
25550 /* XRc zero register -> just appropriatelly shift XRb into XRa */
25552 case MXU_OPTN3_PTN0
:
25553 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25555 case MXU_OPTN3_PTN1
:
25556 case MXU_OPTN3_PTN2
:
25557 case MXU_OPTN3_PTN3
:
25558 tcg_gen_shri_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], 8 * optn3
);
25560 case MXU_OPTN3_PTN4
:
25561 tcg_gen_movi_i32(mxu_gpr
[XRa
- 1], 0);
25564 } else if (unlikely(XRb
== XRc
)) {
25565 /* both operands same -> just rotation or moving from any of them */
25567 case MXU_OPTN3_PTN0
:
25568 case MXU_OPTN3_PTN4
:
25569 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25571 case MXU_OPTN3_PTN1
:
25572 case MXU_OPTN3_PTN2
:
25573 case MXU_OPTN3_PTN3
:
25574 tcg_gen_rotli_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1], 8 * optn3
);
25578 /* the most general case */
25580 case MXU_OPTN3_PTN0
:
25584 /* +---------------+ */
25585 /* | A B C D | E F G H */
25586 /* +-------+-------+ */
25591 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRb
- 1]);
25594 case MXU_OPTN3_PTN1
:
25598 /* +-------------------+ */
25599 /* A | B C D E | F G H */
25600 /* +---------+---------+ */
25605 TCGv_i32 t0
= tcg_temp_new();
25606 TCGv_i32 t1
= tcg_temp_new();
25608 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x00FFFFFF);
25609 tcg_gen_shli_i32(t0
, t0
, 8);
25611 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFF000000);
25612 tcg_gen_shri_i32(t1
, t1
, 24);
25614 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25620 case MXU_OPTN3_PTN2
:
25624 /* +-------------------+ */
25625 /* A B | C D E F | G H */
25626 /* +---------+---------+ */
25631 TCGv_i32 t0
= tcg_temp_new();
25632 TCGv_i32 t1
= tcg_temp_new();
25634 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x0000FFFF);
25635 tcg_gen_shli_i32(t0
, t0
, 16);
25637 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFFFF0000);
25638 tcg_gen_shri_i32(t1
, t1
, 16);
25640 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25646 case MXU_OPTN3_PTN3
:
25650 /* +-------------------+ */
25651 /* A B C | D E F G | H */
25652 /* +---------+---------+ */
25657 TCGv_i32 t0
= tcg_temp_new();
25658 TCGv_i32 t1
= tcg_temp_new();
25660 tcg_gen_andi_i32(t0
, mxu_gpr
[XRb
- 1], 0x000000FF);
25661 tcg_gen_shli_i32(t0
, t0
, 24);
25663 tcg_gen_andi_i32(t1
, mxu_gpr
[XRc
- 1], 0xFFFFFF00);
25664 tcg_gen_shri_i32(t1
, t1
, 8);
25666 tcg_gen_or_i32(mxu_gpr
[XRa
- 1], t0
, t1
);
25672 case MXU_OPTN3_PTN4
:
25676 /* +---------------+ */
25677 /* A B C D | E F G H | */
25678 /* +-------+-------+ */
25683 tcg_gen_mov_i32(mxu_gpr
[XRa
- 1], mxu_gpr
[XRc
- 1]);
25692 * Decoding engine for MXU
25693 * =======================
/*
 * Decode pool 00 of the MXU opcode space: the packed signed
 * max/min instructions, selected by opcode bits [20:18].
 */
static void decode_opc_mxu__pool00(DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 18, 3);

    switch (opcode) {
    case OPC_MXU_S32MAX:
    case OPC_MXU_S32MIN:
        gen_mxu_S32MAX_S32MIN(ctx);
        break;
    case OPC_MXU_D16MAX:
    case OPC_MXU_D16MIN:
        gen_mxu_D16MAX_D16MIN(ctx);
        break;
    case OPC_MXU_Q8MAX:
    case OPC_MXU_Q8MIN:
        gen_mxu_Q8MAX_Q8MIN(ctx);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        gen_reserved_instruction(ctx);
        break;
    }
}
/*
 * Decode pool 04 of the MXU opcode space: S32LDD/S32LDDR,
 * distinguished by the single opcode bit 20.
 */
static void decode_opc_mxu__pool04(DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 20, 1);

    switch (opcode) {
    case OPC_MXU_S32LDD:
    case OPC_MXU_S32LDDR:
        gen_mxu_s32ldd_s32lddr(ctx);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        gen_reserved_instruction(ctx);
        break;
    }
}
/*
 * Decode pool 16 of the MXU opcode space: byte alignment and the
 * 32-bit bitwise logic instructions, selected by opcode bits [20:18].
 */
static void decode_opc_mxu__pool16(DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 18, 3);

    switch (opcode) {
    case OPC_MXU_S32ALNI:
        gen_mxu_S32ALNI(ctx);
        break;
    case OPC_MXU_S32NOR:
        gen_mxu_S32NOR(ctx);
        break;
    case OPC_MXU_S32AND:
        gen_mxu_S32AND(ctx);
        break;
    case OPC_MXU_S32OR:
        gen_mxu_S32OR(ctx);
        break;
    case OPC_MXU_S32XOR:
        gen_mxu_S32XOR(ctx);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        gen_reserved_instruction(ctx);
        break;
    }
}
/*
 * Decode pool 19 of the MXU opcode space: Q8MUL/Q8MULSU,
 * distinguished by opcode bits [23:22].
 */
static void decode_opc_mxu__pool19(DisasContext *ctx)
{
    uint32_t opcode = extract32(ctx->opcode, 22, 2);

    switch (opcode) {
    case OPC_MXU_Q8MUL:
    case OPC_MXU_Q8MULSU:
        gen_mxu_q8mul_q8mulsu(ctx);
        break;
    default:
        MIPS_INVAL("decode_opc_mxu");
        gen_reserved_instruction(ctx);
        break;
    }
}
25780 * Main MXU decoding function
/*
 * Main MXU decoding function.
 *
 * S32M2I/S32I2M (moves between MXU and GPR registers) are decoded
 * unconditionally; every other MXU instruction is only executed when
 * the MXU_EN bit is set in the MXU control register, which is tested
 * at runtime with a generated branch around the instruction body.
 */
static void decode_opc_mxu(DisasContext *ctx, uint32_t insn)
{
    uint32_t opcode = extract32(insn, 0, 6);

    if (opcode == OPC_MXU_S32M2I) {
        gen_mxu_s32m2i(ctx);
        return;
    }

    if (opcode == OPC_MXU_S32I2M) {
        gen_mxu_s32i2m(ctx);
        return;
    }

    {
        TCGv t_mxu_cr = tcg_temp_new();
        TCGLabel *l_exit = gen_new_label();

        /* Skip the whole instruction if MXU_CR.MXU_EN is clear. */
        gen_load_mxu_cr(t_mxu_cr);
        tcg_gen_andi_tl(t_mxu_cr, t_mxu_cr, MXU_CR_MXU_EN);
        tcg_gen_brcondi_tl(TCG_COND_NE, t_mxu_cr, MXU_CR_MXU_EN, l_exit);

        switch (opcode) {
        case OPC_MXU__POOL00:
            decode_opc_mxu__pool00(ctx);
            break;
        case OPC_MXU_D16MUL:
            gen_mxu_d16mul(ctx);
            break;
        case OPC_MXU_D16MAC:
            gen_mxu_d16mac(ctx);
            break;
        case OPC_MXU__POOL04:
            decode_opc_mxu__pool04(ctx);
            break;
        case OPC_MXU_S8LDD:
            gen_mxu_s8ldd(ctx);
            break;
        case OPC_MXU__POOL16:
            decode_opc_mxu__pool16(ctx);
            break;
        case OPC_MXU__POOL19:
            decode_opc_mxu__pool19(ctx);
            break;
        default:
            MIPS_INVAL("decode_opc_mxu");
            gen_reserved_instruction(ctx);
        }

        gen_set_label(l_exit);
        tcg_temp_free(t_mxu_cr);
    }
}
25836 #endif /* !defined(TARGET_MIPS64) */
/*
 * Decode the pre-R6 SPECIAL2 opcode space: multiply-accumulate,
 * count-leading-zeros/ones, SDBBP and the Loongson-2F extensions.
 *
 * NOTE(review): several case labels in this switch were lost in
 * extraction and have been reconstructed from the visible handler
 * bodies (MADDU/MSUB/MSUBU, MUL, DIV_G_2F, MOD_G_2F, CLO/CLZ, SDBBP,
 * DCLO/DCLZ) — verify against upstream before relying on them.
 */
static void decode_opc_special2_legacy(CPUMIPSState *env, DisasContext *ctx)
{
    int rs, rt, rd;
    uint32_t op1;

    rs = (ctx->opcode >> 21) & 0x1f;
    rt = (ctx->opcode >> 16) & 0x1f;
    rd = (ctx->opcode >> 11) & 0x1f;

    op1 = MASK_SPECIAL2(ctx->opcode);
    switch (op1) {
    case OPC_MADD: /* Multiply and add/sub */
    case OPC_MADDU:
    case OPC_MSUB:
    case OPC_MSUBU:
        check_insn(ctx, ISA_MIPS_R1);
        gen_muldiv(ctx, op1, rd & 3, rs, rt);
        break;
    case OPC_MUL:
        gen_arith(ctx, op1, rd, rs, rt);
        break;
    case OPC_DIV_G_2F:
    case OPC_DIVU_G_2F:
    case OPC_MULT_G_2F:
    case OPC_MULTU_G_2F:
    case OPC_MOD_G_2F:
    case OPC_MODU_G_2F:
        check_insn(ctx, INSN_LOONGSON2F | ASE_LEXT);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
        break;
    case OPC_CLO:
    case OPC_CLZ:
        check_insn(ctx, ISA_MIPS_R1);
        gen_cl(ctx, op1, rd, rs);
        break;
    case OPC_SDBBP:
        if (is_uhi(extract32(ctx->opcode, 6, 20))) {
            /* UHI semihosting trap */
            gen_helper_do_semihosting(cpu_env);
        } else {
            /*
             * XXX: not clear which exception should be raised
             *      when in debug mode...
             */
            check_insn(ctx, ISA_MIPS_R1);
            generate_exception_end(ctx, EXCP_DBp);
        }
        break;
#if defined(TARGET_MIPS64)
    case OPC_DCLO:
    case OPC_DCLZ:
        check_insn(ctx, ISA_MIPS_R1);
        check_mips_64(ctx);
        gen_cl(ctx, op1, rd, rs);
        break;
    case OPC_DMULT_G_2F:
    case OPC_DMULTU_G_2F:
    case OPC_DDIV_G_2F:
    case OPC_DDIVU_G_2F:
    case OPC_DMOD_G_2F:
    case OPC_DMODU_G_2F:
        check_insn(ctx, INSN_LOONGSON2F | ASE_LEXT);
        gen_loongson_integer(ctx, op1, rd, rs, rt);
        break;
#endif
    default:            /* Invalid */
        MIPS_INVAL("special2_legacy");
        gen_reserved_instruction(ctx);
        break;
    }
}
25910 static void decode_opc_special3_r6(CPUMIPSState
*env
, DisasContext
*ctx
)
25912 int rs
, rt
, rd
, sa
;
25916 rs
= (ctx
->opcode
>> 21) & 0x1f;
25917 rt
= (ctx
->opcode
>> 16) & 0x1f;
25918 rd
= (ctx
->opcode
>> 11) & 0x1f;
25919 sa
= (ctx
->opcode
>> 6) & 0x1f;
25920 imm
= (int16_t)ctx
->opcode
>> 7;
25922 op1
= MASK_SPECIAL3(ctx
->opcode
);
25926 /* hint codes 24-31 are reserved and signal RI */
25927 gen_reserved_instruction(ctx
);
25929 /* Treat as NOP. */
25932 check_cp0_enabled(ctx
);
25933 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
25934 gen_cache_operation(ctx
, rt
, rs
, imm
);
25938 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TESL
, false);
25941 gen_ld(ctx
, op1
, rt
, rs
, imm
);
25946 /* Treat as NOP. */
25949 op2
= MASK_BSHFL(ctx
->opcode
);
25955 gen_align(ctx
, 32, rd
, rs
, rt
, sa
& 3);
25958 gen_bitswap(ctx
, op2
, rd
, rt
);
25963 #ifndef CONFIG_USER_ONLY
25965 if (unlikely(ctx
->gi
<= 1)) {
25966 gen_reserved_instruction(ctx
);
25968 check_cp0_enabled(ctx
);
25969 switch ((ctx
->opcode
>> 6) & 3) {
25970 case 0: /* GINVI */
25971 /* Treat as NOP. */
25973 case 2: /* GINVT */
25974 gen_helper_0e1i(ginvt
, cpu_gpr
[rs
], extract32(ctx
->opcode
, 8, 2));
25977 gen_reserved_instruction(ctx
);
25982 #if defined(TARGET_MIPS64)
25984 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TEQ
, false);
25987 gen_ld(ctx
, op1
, rt
, rs
, imm
);
25990 check_mips_64(ctx
);
25993 /* Treat as NOP. */
25996 op2
= MASK_DBSHFL(ctx
->opcode
);
26006 gen_align(ctx
, 64, rd
, rs
, rt
, sa
& 7);
26009 gen_bitswap(ctx
, op2
, rd
, rt
);
26016 default: /* Invalid */
26017 MIPS_INVAL("special3_r6");
26018 gen_reserved_instruction(ctx
);
26023 static void decode_opc_special3_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
26028 rs
= (ctx
->opcode
>> 21) & 0x1f;
26029 rt
= (ctx
->opcode
>> 16) & 0x1f;
26030 rd
= (ctx
->opcode
>> 11) & 0x1f;
26032 op1
= MASK_SPECIAL3(ctx
->opcode
);
26035 case OPC_DIVU_G_2E
:
26037 case OPC_MODU_G_2E
:
26038 case OPC_MULT_G_2E
:
26039 case OPC_MULTU_G_2E
:
26041 * OPC_MULT_G_2E, OPC_ADDUH_QB_DSP, OPC_MUL_PH_DSP have
26042 * the same mask and op1.
26044 if ((ctx
->insn_flags
& ASE_DSP_R2
) && (op1
== OPC_MULT_G_2E
)) {
26045 op2
= MASK_ADDUH_QB(ctx
->opcode
);
26048 case OPC_ADDUH_R_QB
:
26050 case OPC_ADDQH_R_PH
:
26052 case OPC_ADDQH_R_W
:
26054 case OPC_SUBUH_R_QB
:
26056 case OPC_SUBQH_R_PH
:
26058 case OPC_SUBQH_R_W
:
26059 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26064 case OPC_MULQ_RS_W
:
26065 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26068 MIPS_INVAL("MASK ADDUH.QB");
26069 gen_reserved_instruction(ctx
);
26072 } else if (ctx
->insn_flags
& INSN_LOONGSON2E
) {
26073 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
26075 gen_reserved_instruction(ctx
);
26079 op2
= MASK_LX(ctx
->opcode
);
26081 #if defined(TARGET_MIPS64)
26087 gen_mipsdsp_ld(ctx
, op2
, rd
, rs
, rt
);
26089 default: /* Invalid */
26090 MIPS_INVAL("MASK LX");
26091 gen_reserved_instruction(ctx
);
26095 case OPC_ABSQ_S_PH_DSP
:
26096 op2
= MASK_ABSQ_S_PH(ctx
->opcode
);
26098 case OPC_ABSQ_S_QB
:
26099 case OPC_ABSQ_S_PH
:
26101 case OPC_PRECEQ_W_PHL
:
26102 case OPC_PRECEQ_W_PHR
:
26103 case OPC_PRECEQU_PH_QBL
:
26104 case OPC_PRECEQU_PH_QBR
:
26105 case OPC_PRECEQU_PH_QBLA
:
26106 case OPC_PRECEQU_PH_QBRA
:
26107 case OPC_PRECEU_PH_QBL
:
26108 case OPC_PRECEU_PH_QBR
:
26109 case OPC_PRECEU_PH_QBLA
:
26110 case OPC_PRECEU_PH_QBRA
:
26111 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26118 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
26121 MIPS_INVAL("MASK ABSQ_S.PH");
26122 gen_reserved_instruction(ctx
);
26126 case OPC_ADDU_QB_DSP
:
26127 op2
= MASK_ADDU_QB(ctx
->opcode
);
26130 case OPC_ADDQ_S_PH
:
26133 case OPC_ADDU_S_QB
:
26135 case OPC_ADDU_S_PH
:
26137 case OPC_SUBQ_S_PH
:
26140 case OPC_SUBU_S_QB
:
26142 case OPC_SUBU_S_PH
:
26146 case OPC_RADDU_W_QB
:
26147 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26149 case OPC_MULEU_S_PH_QBL
:
26150 case OPC_MULEU_S_PH_QBR
:
26151 case OPC_MULQ_RS_PH
:
26152 case OPC_MULEQ_S_W_PHL
:
26153 case OPC_MULEQ_S_W_PHR
:
26154 case OPC_MULQ_S_PH
:
26155 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26157 default: /* Invalid */
26158 MIPS_INVAL("MASK ADDU.QB");
26159 gen_reserved_instruction(ctx
);
26164 case OPC_CMPU_EQ_QB_DSP
:
26165 op2
= MASK_CMPU_EQ_QB(ctx
->opcode
);
26167 case OPC_PRECR_SRA_PH_W
:
26168 case OPC_PRECR_SRA_R_PH_W
:
26169 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
26171 case OPC_PRECR_QB_PH
:
26172 case OPC_PRECRQ_QB_PH
:
26173 case OPC_PRECRQ_PH_W
:
26174 case OPC_PRECRQ_RS_PH_W
:
26175 case OPC_PRECRQU_S_QB_PH
:
26176 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26178 case OPC_CMPU_EQ_QB
:
26179 case OPC_CMPU_LT_QB
:
26180 case OPC_CMPU_LE_QB
:
26181 case OPC_CMP_EQ_PH
:
26182 case OPC_CMP_LT_PH
:
26183 case OPC_CMP_LE_PH
:
26184 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26186 case OPC_CMPGU_EQ_QB
:
26187 case OPC_CMPGU_LT_QB
:
26188 case OPC_CMPGU_LE_QB
:
26189 case OPC_CMPGDU_EQ_QB
:
26190 case OPC_CMPGDU_LT_QB
:
26191 case OPC_CMPGDU_LE_QB
:
26194 case OPC_PACKRL_PH
:
26195 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26197 default: /* Invalid */
26198 MIPS_INVAL("MASK CMPU.EQ.QB");
26199 gen_reserved_instruction(ctx
);
26203 case OPC_SHLL_QB_DSP
:
26204 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
26206 case OPC_DPA_W_PH_DSP
:
26207 op2
= MASK_DPA_W_PH(ctx
->opcode
);
26209 case OPC_DPAU_H_QBL
:
26210 case OPC_DPAU_H_QBR
:
26211 case OPC_DPSU_H_QBL
:
26212 case OPC_DPSU_H_QBR
:
26214 case OPC_DPAX_W_PH
:
26215 case OPC_DPAQ_S_W_PH
:
26216 case OPC_DPAQX_S_W_PH
:
26217 case OPC_DPAQX_SA_W_PH
:
26219 case OPC_DPSX_W_PH
:
26220 case OPC_DPSQ_S_W_PH
:
26221 case OPC_DPSQX_S_W_PH
:
26222 case OPC_DPSQX_SA_W_PH
:
26223 case OPC_MULSAQ_S_W_PH
:
26224 case OPC_DPAQ_SA_L_W
:
26225 case OPC_DPSQ_SA_L_W
:
26226 case OPC_MAQ_S_W_PHL
:
26227 case OPC_MAQ_S_W_PHR
:
26228 case OPC_MAQ_SA_W_PHL
:
26229 case OPC_MAQ_SA_W_PHR
:
26230 case OPC_MULSA_W_PH
:
26231 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26233 default: /* Invalid */
26234 MIPS_INVAL("MASK DPAW.PH");
26235 gen_reserved_instruction(ctx
);
26240 op2
= MASK_INSV(ctx
->opcode
);
26251 t0
= tcg_temp_new();
26252 t1
= tcg_temp_new();
26254 gen_load_gpr(t0
, rt
);
26255 gen_load_gpr(t1
, rs
);
26257 gen_helper_insv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
26263 default: /* Invalid */
26264 MIPS_INVAL("MASK INSV");
26265 gen_reserved_instruction(ctx
);
26269 case OPC_APPEND_DSP
:
26270 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
26272 case OPC_EXTR_W_DSP
:
26273 op2
= MASK_EXTR_W(ctx
->opcode
);
26277 case OPC_EXTR_RS_W
:
26279 case OPC_EXTRV_S_H
:
26281 case OPC_EXTRV_R_W
:
26282 case OPC_EXTRV_RS_W
:
26287 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
26290 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26296 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26298 default: /* Invalid */
26299 MIPS_INVAL("MASK EXTR.W");
26300 gen_reserved_instruction(ctx
);
26304 #if defined(TARGET_MIPS64)
26305 case OPC_DDIV_G_2E
:
26306 case OPC_DDIVU_G_2E
:
26307 case OPC_DMULT_G_2E
:
26308 case OPC_DMULTU_G_2E
:
26309 case OPC_DMOD_G_2E
:
26310 case OPC_DMODU_G_2E
:
26311 check_insn(ctx
, INSN_LOONGSON2E
);
26312 gen_loongson_integer(ctx
, op1
, rd
, rs
, rt
);
26314 case OPC_ABSQ_S_QH_DSP
:
26315 op2
= MASK_ABSQ_S_QH(ctx
->opcode
);
26317 case OPC_PRECEQ_L_PWL
:
26318 case OPC_PRECEQ_L_PWR
:
26319 case OPC_PRECEQ_PW_QHL
:
26320 case OPC_PRECEQ_PW_QHR
:
26321 case OPC_PRECEQ_PW_QHLA
:
26322 case OPC_PRECEQ_PW_QHRA
:
26323 case OPC_PRECEQU_QH_OBL
:
26324 case OPC_PRECEQU_QH_OBR
:
26325 case OPC_PRECEQU_QH_OBLA
:
26326 case OPC_PRECEQU_QH_OBRA
:
26327 case OPC_PRECEU_QH_OBL
:
26328 case OPC_PRECEU_QH_OBR
:
26329 case OPC_PRECEU_QH_OBLA
:
26330 case OPC_PRECEU_QH_OBRA
:
26331 case OPC_ABSQ_S_OB
:
26332 case OPC_ABSQ_S_PW
:
26333 case OPC_ABSQ_S_QH
:
26334 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26342 gen_mipsdsp_bitinsn(ctx
, op1
, op2
, rd
, rt
);
26344 default: /* Invalid */
26345 MIPS_INVAL("MASK ABSQ_S.QH");
26346 gen_reserved_instruction(ctx
);
26350 case OPC_ADDU_OB_DSP
:
26351 op2
= MASK_ADDU_OB(ctx
->opcode
);
26353 case OPC_RADDU_L_OB
:
26355 case OPC_SUBQ_S_PW
:
26357 case OPC_SUBQ_S_QH
:
26359 case OPC_SUBU_S_OB
:
26361 case OPC_SUBU_S_QH
:
26363 case OPC_SUBUH_R_OB
:
26365 case OPC_ADDQ_S_PW
:
26367 case OPC_ADDQ_S_QH
:
26369 case OPC_ADDU_S_OB
:
26371 case OPC_ADDU_S_QH
:
26373 case OPC_ADDUH_R_OB
:
26374 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26376 case OPC_MULEQ_S_PW_QHL
:
26377 case OPC_MULEQ_S_PW_QHR
:
26378 case OPC_MULEU_S_QH_OBL
:
26379 case OPC_MULEU_S_QH_OBR
:
26380 case OPC_MULQ_RS_QH
:
26381 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26383 default: /* Invalid */
26384 MIPS_INVAL("MASK ADDU.OB");
26385 gen_reserved_instruction(ctx
);
26389 case OPC_CMPU_EQ_OB_DSP
:
26390 op2
= MASK_CMPU_EQ_OB(ctx
->opcode
);
26392 case OPC_PRECR_SRA_QH_PW
:
26393 case OPC_PRECR_SRA_R_QH_PW
:
26394 /* Return value is rt. */
26395 gen_mipsdsp_arith(ctx
, op1
, op2
, rt
, rs
, rd
);
26397 case OPC_PRECR_OB_QH
:
26398 case OPC_PRECRQ_OB_QH
:
26399 case OPC_PRECRQ_PW_L
:
26400 case OPC_PRECRQ_QH_PW
:
26401 case OPC_PRECRQ_RS_QH_PW
:
26402 case OPC_PRECRQU_S_OB_QH
:
26403 gen_mipsdsp_arith(ctx
, op1
, op2
, rd
, rs
, rt
);
26405 case OPC_CMPU_EQ_OB
:
26406 case OPC_CMPU_LT_OB
:
26407 case OPC_CMPU_LE_OB
:
26408 case OPC_CMP_EQ_QH
:
26409 case OPC_CMP_LT_QH
:
26410 case OPC_CMP_LE_QH
:
26411 case OPC_CMP_EQ_PW
:
26412 case OPC_CMP_LT_PW
:
26413 case OPC_CMP_LE_PW
:
26414 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26416 case OPC_CMPGDU_EQ_OB
:
26417 case OPC_CMPGDU_LT_OB
:
26418 case OPC_CMPGDU_LE_OB
:
26419 case OPC_CMPGU_EQ_OB
:
26420 case OPC_CMPGU_LT_OB
:
26421 case OPC_CMPGU_LE_OB
:
26422 case OPC_PACKRL_PW
:
26426 gen_mipsdsp_add_cmp_pick(ctx
, op1
, op2
, rd
, rs
, rt
, 1);
26428 default: /* Invalid */
26429 MIPS_INVAL("MASK CMPU_EQ.OB");
26430 gen_reserved_instruction(ctx
);
26434 case OPC_DAPPEND_DSP
:
26435 gen_mipsdsp_append(env
, ctx
, op1
, rt
, rs
, rd
);
26437 case OPC_DEXTR_W_DSP
:
26438 op2
= MASK_DEXTR_W(ctx
->opcode
);
26445 case OPC_DEXTR_R_L
:
26446 case OPC_DEXTR_RS_L
:
26448 case OPC_DEXTR_R_W
:
26449 case OPC_DEXTR_RS_W
:
26450 case OPC_DEXTR_S_H
:
26452 case OPC_DEXTRV_R_L
:
26453 case OPC_DEXTRV_RS_L
:
26454 case OPC_DEXTRV_S_H
:
26456 case OPC_DEXTRV_R_W
:
26457 case OPC_DEXTRV_RS_W
:
26458 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rt
, rs
, rd
, 1);
26463 gen_mipsdsp_accinsn(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26465 default: /* Invalid */
26466 MIPS_INVAL("MASK EXTR.W");
26467 gen_reserved_instruction(ctx
);
26471 case OPC_DPAQ_W_QH_DSP
:
26472 op2
= MASK_DPAQ_W_QH(ctx
->opcode
);
26474 case OPC_DPAU_H_OBL
:
26475 case OPC_DPAU_H_OBR
:
26476 case OPC_DPSU_H_OBL
:
26477 case OPC_DPSU_H_OBR
:
26479 case OPC_DPAQ_S_W_QH
:
26481 case OPC_DPSQ_S_W_QH
:
26482 case OPC_MULSAQ_S_W_QH
:
26483 case OPC_DPAQ_SA_L_PW
:
26484 case OPC_DPSQ_SA_L_PW
:
26485 case OPC_MULSAQ_S_L_PW
:
26486 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26488 case OPC_MAQ_S_W_QHLL
:
26489 case OPC_MAQ_S_W_QHLR
:
26490 case OPC_MAQ_S_W_QHRL
:
26491 case OPC_MAQ_S_W_QHRR
:
26492 case OPC_MAQ_SA_W_QHLL
:
26493 case OPC_MAQ_SA_W_QHLR
:
26494 case OPC_MAQ_SA_W_QHRL
:
26495 case OPC_MAQ_SA_W_QHRR
:
26496 case OPC_MAQ_S_L_PWL
:
26497 case OPC_MAQ_S_L_PWR
:
26502 gen_mipsdsp_multiply(ctx
, op1
, op2
, rd
, rs
, rt
, 0);
26504 default: /* Invalid */
26505 MIPS_INVAL("MASK DPAQ.W.QH");
26506 gen_reserved_instruction(ctx
);
26510 case OPC_DINSV_DSP
:
26511 op2
= MASK_INSV(ctx
->opcode
);
26522 t0
= tcg_temp_new();
26523 t1
= tcg_temp_new();
26525 gen_load_gpr(t0
, rt
);
26526 gen_load_gpr(t1
, rs
);
26528 gen_helper_dinsv(cpu_gpr
[rt
], cpu_env
, t1
, t0
);
26534 default: /* Invalid */
26535 MIPS_INVAL("MASK DINSV");
26536 gen_reserved_instruction(ctx
);
26540 case OPC_SHLL_OB_DSP
:
26541 gen_mipsdsp_shift(ctx
, op1
, rd
, rs
, rt
);
26544 default: /* Invalid */
26545 MIPS_INVAL("special3_legacy");
26546 gen_reserved_instruction(ctx
);
26552 #if defined(TARGET_MIPS64)
/*
 * Decode TX79 (R5900) MMI class MMI0.  All instructions in this class
 * are currently unimplemented and raise Reserved Instruction; the
 * per-case TODO markers track which ones still need implementing.
 */
static void decode_mmi0(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opc = MASK_MMI0(ctx->opcode);

    switch (opc) {
    case MMI_OPC_0_PADDW:     /* TODO: MMI_OPC_0_PADDW */
    case MMI_OPC_0_PSUBW:     /* TODO: MMI_OPC_0_PSUBW */
    case MMI_OPC_0_PCGTW:     /* TODO: MMI_OPC_0_PCGTW */
    case MMI_OPC_0_PMAXW:     /* TODO: MMI_OPC_0_PMAXW */
    case MMI_OPC_0_PADDH:     /* TODO: MMI_OPC_0_PADDH */
    case MMI_OPC_0_PSUBH:     /* TODO: MMI_OPC_0_PSUBH */
    case MMI_OPC_0_PCGTH:     /* TODO: MMI_OPC_0_PCGTH */
    case MMI_OPC_0_PMAXH:     /* TODO: MMI_OPC_0_PMAXH */
    case MMI_OPC_0_PADDB:     /* TODO: MMI_OPC_0_PADDB */
    case MMI_OPC_0_PSUBB:     /* TODO: MMI_OPC_0_PSUBB */
    case MMI_OPC_0_PCGTB:     /* TODO: MMI_OPC_0_PCGTB */
    case MMI_OPC_0_PADDSW:    /* TODO: MMI_OPC_0_PADDSW */
    case MMI_OPC_0_PSUBSW:    /* TODO: MMI_OPC_0_PSUBSW */
    case MMI_OPC_0_PEXTLW:    /* TODO: MMI_OPC_0_PEXTLW */
    case MMI_OPC_0_PPACW:     /* TODO: MMI_OPC_0_PPACW */
    case MMI_OPC_0_PADDSH:    /* TODO: MMI_OPC_0_PADDSH */
    case MMI_OPC_0_PSUBSH:    /* TODO: MMI_OPC_0_PSUBSH */
    case MMI_OPC_0_PEXTLH:    /* TODO: MMI_OPC_0_PEXTLH */
    case MMI_OPC_0_PPACH:     /* TODO: MMI_OPC_0_PPACH */
    case MMI_OPC_0_PADDSB:    /* TODO: MMI_OPC_0_PADDSB */
    case MMI_OPC_0_PSUBSB:    /* TODO: MMI_OPC_0_PSUBSB */
    case MMI_OPC_0_PEXTLB:    /* TODO: MMI_OPC_0_PEXTLB */
    case MMI_OPC_0_PPACB:     /* TODO: MMI_OPC_0_PPACB */
    case MMI_OPC_0_PEXT5:     /* TODO: MMI_OPC_0_PEXT5 */
    case MMI_OPC_0_PPAC5:     /* TODO: MMI_OPC_0_PPAC5 */
        gen_reserved_instruction(ctx);    /* TODO: MMI_OPC_CLASS_MMI0 */
        break;
    default:
        MIPS_INVAL("TX79 MMI class MMI0");
        gen_reserved_instruction(ctx);
        break;
    }
}
/*
 * Decode TX79 (R5900) MMI class MMI1.  All instructions in this class
 * are currently unimplemented and raise Reserved Instruction.
 */
static void decode_mmi1(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opc = MASK_MMI1(ctx->opcode);

    switch (opc) {
    case MMI_OPC_1_PABSW:     /* TODO: MMI_OPC_1_PABSW */
    case MMI_OPC_1_PCEQW:     /* TODO: MMI_OPC_1_PCEQW */
    case MMI_OPC_1_PMINW:     /* TODO: MMI_OPC_1_PMINW */
    case MMI_OPC_1_PADSBH:    /* TODO: MMI_OPC_1_PADSBH */
    case MMI_OPC_1_PABSH:     /* TODO: MMI_OPC_1_PABSH */
    case MMI_OPC_1_PCEQH:     /* TODO: MMI_OPC_1_PCEQH */
    case MMI_OPC_1_PMINH:     /* TODO: MMI_OPC_1_PMINH */
    case MMI_OPC_1_PCEQB:     /* TODO: MMI_OPC_1_PCEQB */
    case MMI_OPC_1_PADDUW:    /* TODO: MMI_OPC_1_PADDUW */
    case MMI_OPC_1_PSUBUW:    /* TODO: MMI_OPC_1_PSUBUW */
    case MMI_OPC_1_PEXTUW:    /* TODO: MMI_OPC_1_PEXTUW */
    case MMI_OPC_1_PADDUH:    /* TODO: MMI_OPC_1_PADDUH */
    case MMI_OPC_1_PSUBUH:    /* TODO: MMI_OPC_1_PSUBUH */
    case MMI_OPC_1_PEXTUH:    /* TODO: MMI_OPC_1_PEXTUH */
    case MMI_OPC_1_PADDUB:    /* TODO: MMI_OPC_1_PADDUB */
    case MMI_OPC_1_PSUBUB:    /* TODO: MMI_OPC_1_PSUBUB */
    case MMI_OPC_1_PEXTUB:    /* TODO: MMI_OPC_1_PEXTUB */
    case MMI_OPC_1_QFSRV:     /* TODO: MMI_OPC_1_QFSRV */
        gen_reserved_instruction(ctx);    /* TODO: MMI_OPC_CLASS_MMI1 */
        break;
    default:
        MIPS_INVAL("TX79 MMI class MMI1");
        gen_reserved_instruction(ctx);
        break;
    }
}
/*
 * Decode TX79 (R5900) MMI class MMI2.  Only PCPYLD is implemented;
 * the remaining instructions raise Reserved Instruction.
 */
static void decode_mmi2(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opc = MASK_MMI2(ctx->opcode);

    switch (opc) {
    case MMI_OPC_2_PMADDW:    /* TODO: MMI_OPC_2_PMADDW */
    case MMI_OPC_2_PSLLVW:    /* TODO: MMI_OPC_2_PSLLVW */
    case MMI_OPC_2_PSRLVW:    /* TODO: MMI_OPC_2_PSRLVW */
    case MMI_OPC_2_PMSUBW:    /* TODO: MMI_OPC_2_PMSUBW */
    case MMI_OPC_2_PMFHI:     /* TODO: MMI_OPC_2_PMFHI */
    case MMI_OPC_2_PMFLO:     /* TODO: MMI_OPC_2_PMFLO */
    case MMI_OPC_2_PINTH:     /* TODO: MMI_OPC_2_PINTH */
    case MMI_OPC_2_PMULTW:    /* TODO: MMI_OPC_2_PMULTW */
    case MMI_OPC_2_PDIVW:     /* TODO: MMI_OPC_2_PDIVW */
    case MMI_OPC_2_PMADDH:    /* TODO: MMI_OPC_2_PMADDH */
    case MMI_OPC_2_PHMADH:    /* TODO: MMI_OPC_2_PHMADH */
    case MMI_OPC_2_PAND:      /* TODO: MMI_OPC_2_PAND */
    case MMI_OPC_2_PXOR:      /* TODO: MMI_OPC_2_PXOR */
    case MMI_OPC_2_PMSUBH:    /* TODO: MMI_OPC_2_PMSUBH */
    case MMI_OPC_2_PHMSBH:    /* TODO: MMI_OPC_2_PHMSBH */
    case MMI_OPC_2_PEXEH:     /* TODO: MMI_OPC_2_PEXEH */
    case MMI_OPC_2_PREVH:     /* TODO: MMI_OPC_2_PREVH */
    case MMI_OPC_2_PMULTH:    /* TODO: MMI_OPC_2_PMULTH */
    case MMI_OPC_2_PDIVBW:    /* TODO: MMI_OPC_2_PDIVBW */
    case MMI_OPC_2_PEXEW:     /* TODO: MMI_OPC_2_PEXEW */
    case MMI_OPC_2_PROT3W:    /* TODO: MMI_OPC_2_PROT3W */
        gen_reserved_instruction(ctx);    /* TODO: MMI_OPC_CLASS_MMI2 */
        break;
    case MMI_OPC_2_PCPYLD:
        gen_mmi_pcpyld(ctx);
        break;
    default:
        MIPS_INVAL("TX79 MMI class MMI2");
        gen_reserved_instruction(ctx);
        break;
    }
}
/*
 * Decode TX79 (R5900) MMI class MMI3.  Only PCPYH and PCPYUD are
 * implemented; the remaining instructions raise Reserved Instruction.
 */
static void decode_mmi3(CPUMIPSState *env, DisasContext *ctx)
{
    uint32_t opc = MASK_MMI3(ctx->opcode);

    switch (opc) {
    case MMI_OPC_3_PMADDUW:    /* TODO: MMI_OPC_3_PMADDUW */
    case MMI_OPC_3_PSRAVW:     /* TODO: MMI_OPC_3_PSRAVW */
    case MMI_OPC_3_PMTHI:      /* TODO: MMI_OPC_3_PMTHI */
    case MMI_OPC_3_PMTLO:      /* TODO: MMI_OPC_3_PMTLO */
    case MMI_OPC_3_PINTEH:     /* TODO: MMI_OPC_3_PINTEH */
    case MMI_OPC_3_PMULTUW:    /* TODO: MMI_OPC_3_PMULTUW */
    case MMI_OPC_3_PDIVUW:     /* TODO: MMI_OPC_3_PDIVUW */
    case MMI_OPC_3_POR:        /* TODO: MMI_OPC_3_POR */
    case MMI_OPC_3_PNOR:       /* TODO: MMI_OPC_3_PNOR */
    case MMI_OPC_3_PEXCH:      /* TODO: MMI_OPC_3_PEXCH */
    case MMI_OPC_3_PEXCW:      /* TODO: MMI_OPC_3_PEXCW */
        gen_reserved_instruction(ctx);    /* TODO: MMI_OPC_CLASS_MMI3 */
        break;
    case MMI_OPC_3_PCPYH:
        gen_mmi_pcpyh(ctx);
        break;
    case MMI_OPC_3_PCPYUD:
        gen_mmi_pcpyud(ctx);
        break;
    default:
        MIPS_INVAL("TX79 MMI class MMI3");
        gen_reserved_instruction(ctx);
        break;
    }
}
26694 static void decode_mmi(CPUMIPSState
*env
, DisasContext
*ctx
)
26696 uint32_t opc
= MASK_MMI(ctx
->opcode
);
26697 int rs
= extract32(ctx
->opcode
, 21, 5);
26698 int rt
= extract32(ctx
->opcode
, 16, 5);
26699 int rd
= extract32(ctx
->opcode
, 11, 5);
26702 case MMI_OPC_CLASS_MMI0
:
26703 decode_mmi0(env
, ctx
);
26705 case MMI_OPC_CLASS_MMI1
:
26706 decode_mmi1(env
, ctx
);
26708 case MMI_OPC_CLASS_MMI2
:
26709 decode_mmi2(env
, ctx
);
26711 case MMI_OPC_CLASS_MMI3
:
26712 decode_mmi3(env
, ctx
);
26714 case MMI_OPC_MULT1
:
26715 case MMI_OPC_MULTU1
:
26717 case MMI_OPC_MADDU
:
26718 case MMI_OPC_MADD1
:
26719 case MMI_OPC_MADDU1
:
26720 gen_mul_txx9(ctx
, opc
, rd
, rs
, rt
);
26723 case MMI_OPC_DIVU1
:
26724 gen_div1_tx79(ctx
, opc
, rs
, rt
);
26726 case MMI_OPC_MTLO1
:
26727 case MMI_OPC_MTHI1
:
26728 gen_HILO1_tx79(ctx
, opc
, rs
);
26730 case MMI_OPC_MFLO1
:
26731 case MMI_OPC_MFHI1
:
26732 gen_HILO1_tx79(ctx
, opc
, rd
);
26734 case MMI_OPC_PLZCW
: /* TODO: MMI_OPC_PLZCW */
26735 case MMI_OPC_PMFHL
: /* TODO: MMI_OPC_PMFHL */
26736 case MMI_OPC_PMTHL
: /* TODO: MMI_OPC_PMTHL */
26737 case MMI_OPC_PSLLH
: /* TODO: MMI_OPC_PSLLH */
26738 case MMI_OPC_PSRLH
: /* TODO: MMI_OPC_PSRLH */
26739 case MMI_OPC_PSRAH
: /* TODO: MMI_OPC_PSRAH */
26740 case MMI_OPC_PSLLW
: /* TODO: MMI_OPC_PSLLW */
26741 case MMI_OPC_PSRLW
: /* TODO: MMI_OPC_PSRLW */
26742 case MMI_OPC_PSRAW
: /* TODO: MMI_OPC_PSRAW */
26743 gen_reserved_instruction(ctx
); /* TODO: MMI_OPC_CLASS_MMI */
26746 MIPS_INVAL("TX79 MMI class");
26747 gen_reserved_instruction(ctx
);
26752 static void gen_mmi_lq(CPUMIPSState
*env
, DisasContext
*ctx
)
26754 gen_reserved_instruction(ctx
); /* TODO: MMI_OPC_LQ */
26757 static void gen_mmi_sq(DisasContext
*ctx
, int base
, int rt
, int offset
)
26759 gen_reserved_instruction(ctx
); /* TODO: MMI_OPC_SQ */
/*
 * The TX79-specific instruction Store Quadword
 *
 * +--------+-------+-------+------------------------+
 * | 011111 | base  | rt    |           offset       | SQ
 * +--------+-------+-------+------------------------+
 *      6       5       5               16
 *
 * has the same opcode as the Read Hardware Register instruction
 *
 * +--------+-------+-------+-------+-------+--------+
 * | 011111 | 00000 | rt    | rd    | 00000 | 111011 | RDHWR
 * +--------+-------+-------+-------+-------+--------+
 *      6       5       5       5       5        6
 *
 * that is required, trapped and emulated by the Linux kernel. However, all
 * RDHWR encodings yield address error exceptions on the TX79 since the SQ
 * offset is odd. Therefore all valid SQ instructions can execute normally.
 * In user mode, QEMU must verify the upper and lower 11 bits to distinguish
 * between SQ and RDHWR, as the Linux kernel does.
 */
26783 static void decode_mmi_sq(CPUMIPSState
*env
, DisasContext
*ctx
)
26785 int base
= extract32(ctx
->opcode
, 21, 5);
26786 int rt
= extract32(ctx
->opcode
, 16, 5);
26787 int offset
= extract32(ctx
->opcode
, 0, 16);
26789 #ifdef CONFIG_USER_ONLY
26790 uint32_t op1
= MASK_SPECIAL3(ctx
->opcode
);
26791 uint32_t op2
= extract32(ctx
->opcode
, 6, 5);
26793 if (base
== 0 && op2
== 0 && op1
== OPC_RDHWR
) {
26794 int rd
= extract32(ctx
->opcode
, 11, 5);
26796 gen_rdhwr(ctx
, rt
, rd
, 0);
26801 gen_mmi_sq(ctx
, base
, rt
, offset
);
26806 static void decode_opc_special3(CPUMIPSState
*env
, DisasContext
*ctx
)
26808 int rs
, rt
, rd
, sa
;
26812 rs
= (ctx
->opcode
>> 21) & 0x1f;
26813 rt
= (ctx
->opcode
>> 16) & 0x1f;
26814 rd
= (ctx
->opcode
>> 11) & 0x1f;
26815 sa
= (ctx
->opcode
>> 6) & 0x1f;
26816 imm
= sextract32(ctx
->opcode
, 7, 9);
26818 op1
= MASK_SPECIAL3(ctx
->opcode
);
26821 * EVA loads and stores overlap Loongson 2E instructions decoded by
26822 * decode_opc_special3_legacy(), so be careful to allow their decoding when
26835 check_cp0_enabled(ctx
);
26836 gen_ld(ctx
, op1
, rt
, rs
, imm
);
26843 check_cp0_enabled(ctx
);
26844 gen_st(ctx
, op1
, rt
, rs
, imm
);
26847 check_cp0_enabled(ctx
);
26848 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TESL
, true);
26851 check_cp0_enabled(ctx
);
26852 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
26853 gen_cache_operation(ctx
, rt
, rs
, imm
);
26855 /* Treat as NOP. */
26858 check_cp0_enabled(ctx
);
26859 /* Treat as NOP. */
26867 check_insn(ctx
, ISA_MIPS_R2
);
26868 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
26871 op2
= MASK_BSHFL(ctx
->opcode
);
26878 check_insn(ctx
, ISA_MIPS_R6
);
26879 decode_opc_special3_r6(env
, ctx
);
26882 check_insn(ctx
, ISA_MIPS_R2
);
26883 gen_bshfl(ctx
, op2
, rt
, rd
);
26887 #if defined(TARGET_MIPS64)
26894 check_insn(ctx
, ISA_MIPS_R2
);
26895 check_mips_64(ctx
);
26896 gen_bitops(ctx
, op1
, rt
, rs
, sa
, rd
);
26899 op2
= MASK_DBSHFL(ctx
->opcode
);
26910 check_insn(ctx
, ISA_MIPS_R6
);
26911 decode_opc_special3_r6(env
, ctx
);
26914 check_insn(ctx
, ISA_MIPS_R2
);
26915 check_mips_64(ctx
);
26916 op2
= MASK_DBSHFL(ctx
->opcode
);
26917 gen_bshfl(ctx
, op2
, rt
, rd
);
26923 gen_rdhwr(ctx
, rt
, rd
, extract32(ctx
->opcode
, 6, 3));
26928 TCGv t0
= tcg_temp_new();
26929 TCGv t1
= tcg_temp_new();
26931 gen_load_gpr(t0
, rt
);
26932 gen_load_gpr(t1
, rs
);
26933 gen_helper_fork(t0
, t1
);
26941 TCGv t0
= tcg_temp_new();
26943 gen_load_gpr(t0
, rs
);
26944 gen_helper_yield(t0
, cpu_env
, t0
);
26945 gen_store_gpr(t0
, rd
);
26950 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
26951 decode_opc_special3_r6(env
, ctx
);
26953 decode_opc_special3_legacy(env
, ctx
);
26958 static bool decode_opc_legacy(CPUMIPSState
*env
, DisasContext
*ctx
)
26961 int rs
, rt
, rd
, sa
;
26965 op
= MASK_OP_MAJOR(ctx
->opcode
);
26966 rs
= (ctx
->opcode
>> 21) & 0x1f;
26967 rt
= (ctx
->opcode
>> 16) & 0x1f;
26968 rd
= (ctx
->opcode
>> 11) & 0x1f;
26969 sa
= (ctx
->opcode
>> 6) & 0x1f;
26970 imm
= (int16_t)ctx
->opcode
;
26973 decode_opc_special(env
, ctx
);
26976 #if defined(TARGET_MIPS64)
26977 if ((ctx
->insn_flags
& INSN_R5900
) && (ctx
->insn_flags
& ASE_MMI
)) {
26978 decode_mmi(env
, ctx
);
26982 #if !defined(TARGET_MIPS64)
26983 if (ctx
->insn_flags
& ASE_MXU
) {
26984 if (MASK_SPECIAL2(ctx
->opcode
) == OPC_MUL
) {
26985 gen_arith(ctx
, OPC_MUL
, rd
, rs
, rt
);
26987 decode_opc_mxu(ctx
, ctx
->opcode
);
26992 decode_opc_special2_legacy(env
, ctx
);
26995 #if defined(TARGET_MIPS64)
26996 if (ctx
->insn_flags
& INSN_R5900
) {
26997 decode_mmi_sq(env
, ctx
); /* MMI_OPC_SQ */
26999 decode_opc_special3(env
, ctx
);
27002 decode_opc_special3(env
, ctx
);
27006 op1
= MASK_REGIMM(ctx
->opcode
);
27008 case OPC_BLTZL
: /* REGIMM branches */
27012 check_insn(ctx
, ISA_MIPS2
);
27013 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
27017 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
27021 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
27023 /* OPC_NAL, OPC_BAL */
27024 gen_compute_branch(ctx
, op1
, 4, 0, -1, imm
<< 2, 4);
27026 gen_reserved_instruction(ctx
);
27029 gen_compute_branch(ctx
, op1
, 4, rs
, -1, imm
<< 2, 4);
27032 case OPC_TGEI
: /* REGIMM traps */
27039 check_insn(ctx
, ISA_MIPS2
);
27040 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
27041 gen_trap(ctx
, op1
, rs
, -1, imm
);
27044 check_insn(ctx
, ISA_MIPS_R6
);
27045 gen_reserved_instruction(ctx
);
27048 check_insn(ctx
, ISA_MIPS_R2
);
27050 * Break the TB to be able to sync copied instructions
27053 ctx
->base
.is_jmp
= DISAS_STOP
;
27055 case OPC_BPOSGE32
: /* MIPS DSP branch */
27056 #if defined(TARGET_MIPS64)
27060 gen_compute_branch(ctx
, op1
, 4, -1, -2, (int32_t)imm
<< 2, 4);
27062 #if defined(TARGET_MIPS64)
27064 check_insn(ctx
, ISA_MIPS_R6
);
27065 check_mips_64(ctx
);
27067 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 32);
27071 check_insn(ctx
, ISA_MIPS_R6
);
27072 check_mips_64(ctx
);
27074 tcg_gen_addi_tl(cpu_gpr
[rs
], cpu_gpr
[rs
], (int64_t)imm
<< 48);
27078 default: /* Invalid */
27079 MIPS_INVAL("regimm");
27080 gen_reserved_instruction(ctx
);
27085 check_cp0_enabled(ctx
);
27086 op1
= MASK_CP0(ctx
->opcode
);
27094 #if defined(TARGET_MIPS64)
27098 #ifndef CONFIG_USER_ONLY
27099 gen_cp0(env
, ctx
, op1
, rt
, rd
);
27100 #endif /* !CONFIG_USER_ONLY */
27118 #ifndef CONFIG_USER_ONLY
27119 gen_cp0(env
, ctx
, MASK_C0(ctx
->opcode
), rt
, rd
);
27120 #endif /* !CONFIG_USER_ONLY */
27123 #ifndef CONFIG_USER_ONLY
27126 TCGv t0
= tcg_temp_new();
27128 op2
= MASK_MFMC0(ctx
->opcode
);
27132 gen_helper_dmt(t0
);
27133 gen_store_gpr(t0
, rt
);
27137 gen_helper_emt(t0
);
27138 gen_store_gpr(t0
, rt
);
27142 gen_helper_dvpe(t0
, cpu_env
);
27143 gen_store_gpr(t0
, rt
);
27147 gen_helper_evpe(t0
, cpu_env
);
27148 gen_store_gpr(t0
, rt
);
27151 check_insn(ctx
, ISA_MIPS_R6
);
27153 gen_helper_dvp(t0
, cpu_env
);
27154 gen_store_gpr(t0
, rt
);
27158 check_insn(ctx
, ISA_MIPS_R6
);
27160 gen_helper_evp(t0
, cpu_env
);
27161 gen_store_gpr(t0
, rt
);
27165 check_insn(ctx
, ISA_MIPS_R2
);
27166 save_cpu_state(ctx
, 1);
27167 gen_helper_di(t0
, cpu_env
);
27168 gen_store_gpr(t0
, rt
);
27170 * Stop translation as we may have switched
27171 * the execution mode.
27173 ctx
->base
.is_jmp
= DISAS_STOP
;
27176 check_insn(ctx
, ISA_MIPS_R2
);
27177 save_cpu_state(ctx
, 1);
27178 gen_helper_ei(t0
, cpu_env
);
27179 gen_store_gpr(t0
, rt
);
27181 * DISAS_STOP isn't sufficient, we need to ensure we break
27182 * out of translated code to check for pending interrupts.
27184 gen_save_pc(ctx
->base
.pc_next
+ 4);
27185 ctx
->base
.is_jmp
= DISAS_EXIT
;
27187 default: /* Invalid */
27188 MIPS_INVAL("mfmc0");
27189 gen_reserved_instruction(ctx
);
27194 #endif /* !CONFIG_USER_ONLY */
27197 check_insn(ctx
, ISA_MIPS_R2
);
27198 gen_load_srsgpr(rt
, rd
);
27201 check_insn(ctx
, ISA_MIPS_R2
);
27202 gen_store_srsgpr(rt
, rd
);
27206 gen_reserved_instruction(ctx
);
27210 case OPC_BOVC
: /* OPC_BEQZALC, OPC_BEQC, OPC_ADDI */
27211 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
27212 /* OPC_BOVC, OPC_BEQZALC, OPC_BEQC */
27213 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
27216 /* Arithmetic with immediate opcode */
27217 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
27221 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
27223 case OPC_SLTI
: /* Set on less than with immediate opcode */
27225 gen_slt_imm(ctx
, op
, rt
, rs
, imm
);
27227 case OPC_ANDI
: /* Arithmetic with immediate opcode */
27228 case OPC_LUI
: /* OPC_AUI */
27231 gen_logic_imm(ctx
, op
, rt
, rs
, imm
);
27233 case OPC_J
: /* Jump */
27235 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
27236 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
27239 case OPC_BLEZC
: /* OPC_BGEZC, OPC_BGEC, OPC_BLEZL */
27240 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
27242 gen_reserved_instruction(ctx
);
27245 /* OPC_BLEZC, OPC_BGEZC, OPC_BGEC */
27246 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
27249 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
27252 case OPC_BGTZC
: /* OPC_BLTZC, OPC_BLTC, OPC_BGTZL */
27253 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
27255 gen_reserved_instruction(ctx
);
27258 /* OPC_BGTZC, OPC_BLTZC, OPC_BLTC */
27259 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
27262 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
27265 case OPC_BLEZALC
: /* OPC_BGEZALC, OPC_BGEUC, OPC_BLEZ */
27268 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
27270 check_insn(ctx
, ISA_MIPS_R6
);
27271 /* OPC_BLEZALC, OPC_BGEZALC, OPC_BGEUC */
27272 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
27275 case OPC_BGTZALC
: /* OPC_BLTZALC, OPC_BLTUC, OPC_BGTZ */
27278 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
27280 check_insn(ctx
, ISA_MIPS_R6
);
27281 /* OPC_BGTZALC, OPC_BLTZALC, OPC_BLTUC */
27282 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
27287 check_insn(ctx
, ISA_MIPS2
);
27288 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
27292 gen_compute_branch(ctx
, op
, 4, rs
, rt
, imm
<< 2, 4);
27294 case OPC_LL
: /* Load and stores */
27295 check_insn(ctx
, ISA_MIPS2
);
27296 if (ctx
->insn_flags
& INSN_R5900
) {
27297 check_insn_opc_user_only(ctx
, INSN_R5900
);
27308 gen_ld(ctx
, op
, rt
, rs
, imm
);
27315 gen_st(ctx
, op
, rt
, rs
, imm
);
27318 check_insn(ctx
, ISA_MIPS2
);
27319 if (ctx
->insn_flags
& INSN_R5900
) {
27320 check_insn_opc_user_only(ctx
, INSN_R5900
);
27322 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TESL
, false);
27325 check_cp0_enabled(ctx
);
27326 check_insn(ctx
, ISA_MIPS3
| ISA_MIPS_R1
);
27327 if (ctx
->hflags
& MIPS_HFLAG_ITC_CACHE
) {
27328 gen_cache_operation(ctx
, rt
, rs
, imm
);
27330 /* Treat as NOP. */
27333 if (ctx
->insn_flags
& INSN_R5900
) {
27334 /* Treat as NOP. */
27336 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R1
);
27337 /* Treat as NOP. */
27341 /* Floating point (COP1). */
27346 gen_cop1_ldst(ctx
, op
, rt
, rs
, imm
);
27350 op1
= MASK_CP1(ctx
->opcode
);
27355 check_cp1_enabled(ctx
);
27356 check_insn(ctx
, ISA_MIPS_R2
);
27362 check_cp1_enabled(ctx
);
27363 gen_cp1(ctx
, op1
, rt
, rd
);
27365 #if defined(TARGET_MIPS64)
27368 check_cp1_enabled(ctx
);
27369 check_insn(ctx
, ISA_MIPS3
);
27370 check_mips_64(ctx
);
27371 gen_cp1(ctx
, op1
, rt
, rd
);
27374 case OPC_BC1EQZ
: /* OPC_BC1ANY2 */
27375 check_cp1_enabled(ctx
);
27376 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
27378 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
27383 check_insn(ctx
, ASE_MIPS3D
);
27384 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
27385 (rt
>> 2) & 0x7, imm
<< 2);
27389 check_cp1_enabled(ctx
);
27390 check_insn(ctx
, ISA_MIPS_R6
);
27391 gen_compute_branch1_r6(ctx
, MASK_CP1(ctx
->opcode
),
27395 check_cp1_enabled(ctx
);
27396 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
27398 check_insn(ctx
, ASE_MIPS3D
);
27401 check_cp1_enabled(ctx
);
27402 check_insn_opc_removed(ctx
, ISA_MIPS_R6
);
27403 gen_compute_branch1(ctx
, MASK_BC1(ctx
->opcode
),
27404 (rt
>> 2) & 0x7, imm
<< 2);
27411 check_cp1_enabled(ctx
);
27412 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
27418 int r6_op
= ctx
->opcode
& FOP(0x3f, 0x1f);
27419 check_cp1_enabled(ctx
);
27420 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
27422 case R6_OPC_CMP_AF_S
:
27423 case R6_OPC_CMP_UN_S
:
27424 case R6_OPC_CMP_EQ_S
:
27425 case R6_OPC_CMP_UEQ_S
:
27426 case R6_OPC_CMP_LT_S
:
27427 case R6_OPC_CMP_ULT_S
:
27428 case R6_OPC_CMP_LE_S
:
27429 case R6_OPC_CMP_ULE_S
:
27430 case R6_OPC_CMP_SAF_S
:
27431 case R6_OPC_CMP_SUN_S
:
27432 case R6_OPC_CMP_SEQ_S
:
27433 case R6_OPC_CMP_SEUQ_S
:
27434 case R6_OPC_CMP_SLT_S
:
27435 case R6_OPC_CMP_SULT_S
:
27436 case R6_OPC_CMP_SLE_S
:
27437 case R6_OPC_CMP_SULE_S
:
27438 case R6_OPC_CMP_OR_S
:
27439 case R6_OPC_CMP_UNE_S
:
27440 case R6_OPC_CMP_NE_S
:
27441 case R6_OPC_CMP_SOR_S
:
27442 case R6_OPC_CMP_SUNE_S
:
27443 case R6_OPC_CMP_SNE_S
:
27444 gen_r6_cmp_s(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
27446 case R6_OPC_CMP_AF_D
:
27447 case R6_OPC_CMP_UN_D
:
27448 case R6_OPC_CMP_EQ_D
:
27449 case R6_OPC_CMP_UEQ_D
:
27450 case R6_OPC_CMP_LT_D
:
27451 case R6_OPC_CMP_ULT_D
:
27452 case R6_OPC_CMP_LE_D
:
27453 case R6_OPC_CMP_ULE_D
:
27454 case R6_OPC_CMP_SAF_D
:
27455 case R6_OPC_CMP_SUN_D
:
27456 case R6_OPC_CMP_SEQ_D
:
27457 case R6_OPC_CMP_SEUQ_D
:
27458 case R6_OPC_CMP_SLT_D
:
27459 case R6_OPC_CMP_SULT_D
:
27460 case R6_OPC_CMP_SLE_D
:
27461 case R6_OPC_CMP_SULE_D
:
27462 case R6_OPC_CMP_OR_D
:
27463 case R6_OPC_CMP_UNE_D
:
27464 case R6_OPC_CMP_NE_D
:
27465 case R6_OPC_CMP_SOR_D
:
27466 case R6_OPC_CMP_SUNE_D
:
27467 case R6_OPC_CMP_SNE_D
:
27468 gen_r6_cmp_d(ctx
, ctx
->opcode
& 0x1f, rt
, rd
, sa
);
27471 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f),
27472 rt
, rd
, sa
, (imm
>> 8) & 0x7);
27477 gen_farith(ctx
, ctx
->opcode
& FOP(0x3f, 0x1f), rt
, rd
, sa
,
27484 gen_reserved_instruction(ctx
);
27489 /* Compact branches [R6] and COP2 [non-R6] */
27490 case OPC_BC
: /* OPC_LWC2 */
27491 case OPC_BALC
: /* OPC_SWC2 */
27492 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
27493 /* OPC_BC, OPC_BALC */
27494 gen_compute_compact_branch(ctx
, op
, 0, 0,
27495 sextract32(ctx
->opcode
<< 2, 0, 28));
27496 } else if (ctx
->insn_flags
& ASE_LEXT
) {
27497 gen_loongson_lswc2(ctx
, rt
, rs
, rd
);
27499 /* OPC_LWC2, OPC_SWC2 */
27500 /* COP2: Not implemented. */
27501 generate_exception_err(ctx
, EXCP_CpU
, 2);
27504 case OPC_BEQZC
: /* OPC_JIC, OPC_LDC2 */
27505 case OPC_BNEZC
: /* OPC_JIALC, OPC_SDC2 */
27506 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
27508 /* OPC_BEQZC, OPC_BNEZC */
27509 gen_compute_compact_branch(ctx
, op
, rs
, 0,
27510 sextract32(ctx
->opcode
<< 2, 0, 23));
27512 /* OPC_JIC, OPC_JIALC */
27513 gen_compute_compact_branch(ctx
, op
, 0, rt
, imm
);
27515 } else if (ctx
->insn_flags
& ASE_LEXT
) {
27516 gen_loongson_lsdc2(ctx
, rt
, rs
, rd
);
27518 /* OPC_LWC2, OPC_SWC2 */
27519 /* COP2: Not implemented. */
27520 generate_exception_err(ctx
, EXCP_CpU
, 2);
27524 check_insn(ctx
, ASE_LMMI
);
27525 /* Note that these instructions use different fields. */
27526 gen_loongson_multimedia(ctx
, sa
, rd
, rt
);
27530 if (ctx
->CP0_Config1
& (1 << CP0C1_FP
)) {
27531 check_cp1_enabled(ctx
);
27532 op1
= MASK_CP3(ctx
->opcode
);
27536 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS_R2
);
27542 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R2
);
27543 gen_flt3_ldst(ctx
, op1
, sa
, rd
, rs
, rt
);
27546 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R2
);
27547 /* Treat as NOP. */
27550 check_insn(ctx
, ISA_MIPS5
| ISA_MIPS_R2
);
27564 check_insn(ctx
, ISA_MIPS4
| ISA_MIPS_R2
);
27565 gen_flt3_arith(ctx
, op1
, sa
, rs
, rd
, rt
);
27569 gen_reserved_instruction(ctx
);
27573 generate_exception_err(ctx
, EXCP_CpU
, 1);
27577 #if defined(TARGET_MIPS64)
27578 /* MIPS64 opcodes */
27580 if (ctx
->insn_flags
& INSN_R5900
) {
27581 check_insn_opc_user_only(ctx
, INSN_R5900
);
27588 check_insn(ctx
, ISA_MIPS3
);
27589 check_mips_64(ctx
);
27590 gen_ld(ctx
, op
, rt
, rs
, imm
);
27595 check_insn(ctx
, ISA_MIPS3
);
27596 check_mips_64(ctx
);
27597 gen_st(ctx
, op
, rt
, rs
, imm
);
27600 check_insn(ctx
, ISA_MIPS3
);
27601 if (ctx
->insn_flags
& INSN_R5900
) {
27602 check_insn_opc_user_only(ctx
, INSN_R5900
);
27604 check_mips_64(ctx
);
27605 gen_st_cond(ctx
, rt
, rs
, imm
, MO_TEQ
, false);
27607 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC, OPC_DADDI */
27608 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
27609 /* OPC_BNVC, OPC_BNEZALC, OPC_BNEC */
27610 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
27613 check_insn(ctx
, ISA_MIPS3
);
27614 check_mips_64(ctx
);
27615 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
27619 check_insn(ctx
, ISA_MIPS3
);
27620 check_mips_64(ctx
);
27621 gen_arith_imm(ctx
, op
, rt
, rs
, imm
);
27624 case OPC_BNVC
: /* OPC_BNEZALC, OPC_BNEC */
27625 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
27626 gen_compute_compact_branch(ctx
, op
, rs
, rt
, imm
<< 2);
27628 MIPS_INVAL("major opcode");
27629 gen_reserved_instruction(ctx
);
27633 case OPC_DAUI
: /* OPC_JALX */
27634 if (ctx
->insn_flags
& ISA_MIPS_R6
) {
27635 #if defined(TARGET_MIPS64)
27637 check_mips_64(ctx
);
27639 generate_exception(ctx
, EXCP_RI
);
27640 } else if (rt
!= 0) {
27641 TCGv t0
= tcg_temp_new();
27642 gen_load_gpr(t0
, rs
);
27643 tcg_gen_addi_tl(cpu_gpr
[rt
], t0
, imm
<< 16);
27647 gen_reserved_instruction(ctx
);
27648 MIPS_INVAL("major opcode");
27652 check_insn(ctx
, ASE_MIPS16
| ASE_MICROMIPS
);
27653 offset
= (int32_t)(ctx
->opcode
& 0x3FFFFFF) << 2;
27654 gen_compute_branch(ctx
, op
, 4, rs
, rt
, offset
, 4);
27657 case OPC_MDMX
: /* MMI_OPC_LQ */
27658 if (ctx
->insn_flags
& INSN_R5900
) {
27659 #if defined(TARGET_MIPS64)
27660 gen_mmi_lq(env
, ctx
);
27663 /* MDMX: Not implemented. */
27667 check_insn(ctx
, ISA_MIPS_R6
);
27668 gen_pcrel(ctx
, ctx
->opcode
, ctx
->base
.pc_next
, rs
);
27670 default: /* Invalid */
27671 MIPS_INVAL("major opcode");
27677 static void decode_opc(CPUMIPSState
*env
, DisasContext
*ctx
)
27679 /* make sure instructions are on a word boundary */
27680 if (ctx
->base
.pc_next
& 0x3) {
27681 env
->CP0_BadVAddr
= ctx
->base
.pc_next
;
27682 generate_exception_err(ctx
, EXCP_AdEL
, EXCP_INST_NOTAVAIL
);
27686 /* Handle blikely not taken case */
27687 if ((ctx
->hflags
& MIPS_HFLAG_BMASK_BASE
) == MIPS_HFLAG_BL
) {
27688 TCGLabel
*l1
= gen_new_label();
27690 tcg_gen_brcondi_tl(TCG_COND_NE
, bcond
, 0, l1
);
27691 tcg_gen_movi_i32(hflags
, ctx
->hflags
& ~MIPS_HFLAG_BMASK
);
27692 gen_goto_tb(ctx
, 1, ctx
->base
.pc_next
+ 4);
27696 /* Transition to the auto-generated decoder. */
27698 /* ISA extensions */
27699 if (ase_msa_available(env
) && decode_ase_msa(ctx
, ctx
->opcode
)) {
27703 /* ISA (from latest to oldest) */
27704 if (cpu_supports_isa(env
, ISA_MIPS_R6
) && decode_isa_rel6(ctx
, ctx
->opcode
)) {
27708 if (decode_opc_legacy(env
, ctx
)) {
27712 gen_reserved_instruction(ctx
);
27715 static void mips_tr_init_disas_context(DisasContextBase
*dcbase
, CPUState
*cs
)
27717 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
27718 CPUMIPSState
*env
= cs
->env_ptr
;
27720 ctx
->page_start
= ctx
->base
.pc_first
& TARGET_PAGE_MASK
;
27721 ctx
->saved_pc
= -1;
27722 ctx
->insn_flags
= env
->insn_flags
;
27723 ctx
->CP0_Config1
= env
->CP0_Config1
;
27724 ctx
->CP0_Config2
= env
->CP0_Config2
;
27725 ctx
->CP0_Config3
= env
->CP0_Config3
;
27726 ctx
->CP0_Config5
= env
->CP0_Config5
;
27728 ctx
->kscrexist
= (env
->CP0_Config4
>> CP0C4_KScrExist
) & 0xff;
27729 ctx
->rxi
= (env
->CP0_Config3
>> CP0C3_RXI
) & 1;
27730 ctx
->ie
= (env
->CP0_Config4
>> CP0C4_IE
) & 3;
27731 ctx
->bi
= (env
->CP0_Config3
>> CP0C3_BI
) & 1;
27732 ctx
->bp
= (env
->CP0_Config3
>> CP0C3_BP
) & 1;
27733 ctx
->PAMask
= env
->PAMask
;
27734 ctx
->mvh
= (env
->CP0_Config5
>> CP0C5_MVH
) & 1;
27735 ctx
->eva
= (env
->CP0_Config5
>> CP0C5_EVA
) & 1;
27736 ctx
->sc
= (env
->CP0_Config3
>> CP0C3_SC
) & 1;
27737 ctx
->CP0_LLAddr_shift
= env
->CP0_LLAddr_shift
;
27738 ctx
->cmgcr
= (env
->CP0_Config3
>> CP0C3_CMGCR
) & 1;
27739 /* Restore delay slot state from the tb context. */
27740 ctx
->hflags
= (uint32_t)ctx
->base
.tb
->flags
; /* FIXME: maybe use 64 bits? */
27741 ctx
->ulri
= (env
->CP0_Config3
>> CP0C3_ULRI
) & 1;
27742 ctx
->ps
= ((env
->active_fpu
.fcr0
>> FCR0_PS
) & 1) ||
27743 (env
->insn_flags
& (INSN_LOONGSON2E
| INSN_LOONGSON2F
));
27744 ctx
->vp
= (env
->CP0_Config5
>> CP0C5_VP
) & 1;
27745 ctx
->mrp
= (env
->CP0_Config5
>> CP0C5_MRP
) & 1;
27746 ctx
->nan2008
= (env
->active_fpu
.fcr31
>> FCR31_NAN2008
) & 1;
27747 ctx
->abs2008
= (env
->active_fpu
.fcr31
>> FCR31_ABS2008
) & 1;
27748 ctx
->mi
= (env
->CP0_Config5
>> CP0C5_MI
) & 1;
27749 ctx
->gi
= (env
->CP0_Config5
>> CP0C5_GI
) & 3;
27750 restore_cpu_state(env
, ctx
);
27751 #ifdef CONFIG_USER_ONLY
27752 ctx
->mem_idx
= MIPS_HFLAG_UM
;
27754 ctx
->mem_idx
= hflags_mmu_index(ctx
->hflags
);
27756 ctx
->default_tcg_memop_mask
= (ctx
->insn_flags
& (ISA_MIPS_R6
|
27757 INSN_LOONGSON3A
)) ? MO_UNALN
: MO_ALIGN
;
27759 LOG_DISAS("\ntb %p idx %d hflags %04x\n", ctx
->base
.tb
, ctx
->mem_idx
,
27763 static void mips_tr_tb_start(DisasContextBase
*dcbase
, CPUState
*cs
)
27767 static void mips_tr_insn_start(DisasContextBase
*dcbase
, CPUState
*cs
)
27769 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
27771 tcg_gen_insn_start(ctx
->base
.pc_next
, ctx
->hflags
& MIPS_HFLAG_BMASK
,
27775 static bool mips_tr_breakpoint_check(DisasContextBase
*dcbase
, CPUState
*cs
,
27776 const CPUBreakpoint
*bp
)
27778 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
27780 save_cpu_state(ctx
, 1);
27781 ctx
->base
.is_jmp
= DISAS_NORETURN
;
27782 gen_helper_raise_exception_debug(cpu_env
);
27784 * The address covered by the breakpoint must be included in
27785 * [tb->pc, tb->pc + tb->size) in order to for it to be
27786 * properly cleared -- thus we increment the PC here so that
27787 * the logic setting tb->size below does the right thing.
27789 ctx
->base
.pc_next
+= 4;
27793 static void mips_tr_translate_insn(DisasContextBase
*dcbase
, CPUState
*cs
)
27795 CPUMIPSState
*env
= cs
->env_ptr
;
27796 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
27800 is_slot
= ctx
->hflags
& MIPS_HFLAG_BMASK
;
27801 if (ctx
->insn_flags
& ISA_NANOMIPS32
) {
27802 ctx
->opcode
= translator_lduw(env
, ctx
->base
.pc_next
);
27803 insn_bytes
= decode_nanomips_opc(env
, ctx
);
27804 } else if (!(ctx
->hflags
& MIPS_HFLAG_M16
)) {
27805 ctx
->opcode
= translator_ldl(env
, ctx
->base
.pc_next
);
27807 decode_opc(env
, ctx
);
27808 } else if (ctx
->insn_flags
& ASE_MICROMIPS
) {
27809 ctx
->opcode
= translator_lduw(env
, ctx
->base
.pc_next
);
27810 insn_bytes
= decode_micromips_opc(env
, ctx
);
27811 } else if (ctx
->insn_flags
& ASE_MIPS16
) {
27812 ctx
->opcode
= translator_lduw(env
, ctx
->base
.pc_next
);
27813 insn_bytes
= decode_mips16_opc(env
, ctx
);
27815 gen_reserved_instruction(ctx
);
27816 g_assert(ctx
->base
.is_jmp
== DISAS_NORETURN
);
27820 if (ctx
->hflags
& MIPS_HFLAG_BMASK
) {
27821 if (!(ctx
->hflags
& (MIPS_HFLAG_BDS16
| MIPS_HFLAG_BDS32
|
27822 MIPS_HFLAG_FBNSLOT
))) {
27824 * Force to generate branch as there is neither delay nor
27829 if ((ctx
->hflags
& MIPS_HFLAG_M16
) &&
27830 (ctx
->hflags
& MIPS_HFLAG_FBNSLOT
)) {
27832 * Force to generate branch as microMIPS R6 doesn't restrict
27833 * branches in the forbidden slot.
27839 gen_branch(ctx
, insn_bytes
);
27841 ctx
->base
.pc_next
+= insn_bytes
;
27843 if (ctx
->base
.is_jmp
!= DISAS_NEXT
) {
27847 * Execute a branch and its delay slot as a single instruction.
27848 * This is what GDB expects and is consistent with what the
27849 * hardware does (e.g. if a delay slot instruction faults, the
27850 * reported PC is the PC of the branch).
27852 if (ctx
->base
.singlestep_enabled
&&
27853 (ctx
->hflags
& MIPS_HFLAG_BMASK
) == 0) {
27854 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
27856 if (ctx
->base
.pc_next
- ctx
->page_start
>= TARGET_PAGE_SIZE
) {
27857 ctx
->base
.is_jmp
= DISAS_TOO_MANY
;
27861 static void mips_tr_tb_stop(DisasContextBase
*dcbase
, CPUState
*cs
)
27863 DisasContext
*ctx
= container_of(dcbase
, DisasContext
, base
);
27865 if (ctx
->base
.singlestep_enabled
&& ctx
->base
.is_jmp
!= DISAS_NORETURN
) {
27866 save_cpu_state(ctx
, ctx
->base
.is_jmp
!= DISAS_EXIT
);
27867 gen_helper_raise_exception_debug(cpu_env
);
27869 switch (ctx
->base
.is_jmp
) {
27871 gen_save_pc(ctx
->base
.pc_next
);
27872 tcg_gen_lookup_and_goto_ptr();
27875 case DISAS_TOO_MANY
:
27876 save_cpu_state(ctx
, 0);
27877 gen_goto_tb(ctx
, 0, ctx
->base
.pc_next
);
27880 tcg_gen_exit_tb(NULL
, 0);
27882 case DISAS_NORETURN
:
27885 g_assert_not_reached();
27890 static void mips_tr_disas_log(const DisasContextBase
*dcbase
, CPUState
*cs
)
27892 qemu_log("IN: %s\n", lookup_symbol(dcbase
->pc_first
));
27893 log_target_disas(cs
, dcbase
->pc_first
, dcbase
->tb
->size
);
27896 static const TranslatorOps mips_tr_ops
= {
27897 .init_disas_context
= mips_tr_init_disas_context
,
27898 .tb_start
= mips_tr_tb_start
,
27899 .insn_start
= mips_tr_insn_start
,
27900 .breakpoint_check
= mips_tr_breakpoint_check
,
27901 .translate_insn
= mips_tr_translate_insn
,
27902 .tb_stop
= mips_tr_tb_stop
,
27903 .disas_log
= mips_tr_disas_log
,
27906 void gen_intermediate_code(CPUState
*cs
, TranslationBlock
*tb
, int max_insns
)
27910 translator_loop(&mips_tr_ops
, &ctx
.base
, cs
, tb
, max_insns
);
27913 static void fpu_dump_state(CPUMIPSState
*env
, FILE * f
, int flags
)
27916 int is_fpu64
= !!(env
->hflags
& MIPS_HFLAG_F64
);
27918 #define printfpr(fp) \
27921 qemu_fprintf(f, "w:%08x d:%016" PRIx64 \
27922 " fd:%13g fs:%13g psu: %13g\n", \
27923 (fp)->w[FP_ENDIAN_IDX], (fp)->d, \
27924 (double)(fp)->fd, \
27925 (double)(fp)->fs[FP_ENDIAN_IDX], \
27926 (double)(fp)->fs[!FP_ENDIAN_IDX]); \
27929 tmp.w[FP_ENDIAN_IDX] = (fp)->w[FP_ENDIAN_IDX]; \
27930 tmp.w[!FP_ENDIAN_IDX] = ((fp) + 1)->w[FP_ENDIAN_IDX]; \
27931 qemu_fprintf(f, "w:%08x d:%016" PRIx64 \
27932 " fd:%13g fs:%13g psu:%13g\n", \
27933 tmp.w[FP_ENDIAN_IDX], tmp.d, \
27935 (double)tmp.fs[FP_ENDIAN_IDX], \
27936 (double)tmp.fs[!FP_ENDIAN_IDX]); \
27942 "CP1 FCR0 0x%08x FCR31 0x%08x SR.FR %d fp_status 0x%02x\n",
27943 env
->active_fpu
.fcr0
, env
->active_fpu
.fcr31
, is_fpu64
,
27944 get_float_exception_flags(&env
->active_fpu
.fp_status
));
27945 for (i
= 0; i
< 32; (is_fpu64
) ? i
++ : (i
+= 2)) {
27946 qemu_fprintf(f
, "%3s: ", fregnames
[i
]);
27947 printfpr(&env
->active_fpu
.fpr
[i
]);
27953 void mips_cpu_dump_state(CPUState
*cs
, FILE *f
, int flags
)
27955 MIPSCPU
*cpu
= MIPS_CPU(cs
);
27956 CPUMIPSState
*env
= &cpu
->env
;
27959 qemu_fprintf(f
, "pc=0x" TARGET_FMT_lx
" HI=0x" TARGET_FMT_lx
27960 " LO=0x" TARGET_FMT_lx
" ds %04x "
27961 TARGET_FMT_lx
" " TARGET_FMT_ld
"\n",
27962 env
->active_tc
.PC
, env
->active_tc
.HI
[0], env
->active_tc
.LO
[0],
27963 env
->hflags
, env
->btarget
, env
->bcond
);
27964 for (i
= 0; i
< 32; i
++) {
27965 if ((i
& 3) == 0) {
27966 qemu_fprintf(f
, "GPR%02d:", i
);
27968 qemu_fprintf(f
, " %s " TARGET_FMT_lx
,
27969 regnames
[i
], env
->active_tc
.gpr
[i
]);
27970 if ((i
& 3) == 3) {
27971 qemu_fprintf(f
, "\n");
27975 qemu_fprintf(f
, "CP0 Status 0x%08x Cause 0x%08x EPC 0x"
27976 TARGET_FMT_lx
"\n",
27977 env
->CP0_Status
, env
->CP0_Cause
, env
->CP0_EPC
);
27978 qemu_fprintf(f
, " Config0 0x%08x Config1 0x%08x LLAddr 0x%016"
27980 env
->CP0_Config0
, env
->CP0_Config1
, env
->CP0_LLAddr
);
27981 qemu_fprintf(f
, " Config2 0x%08x Config3 0x%08x\n",
27982 env
->CP0_Config2
, env
->CP0_Config3
);
27983 qemu_fprintf(f
, " Config4 0x%08x Config5 0x%08x\n",
27984 env
->CP0_Config4
, env
->CP0_Config5
);
27985 if ((flags
& CPU_DUMP_FPU
) && (env
->hflags
& MIPS_HFLAG_FPU
)) {
27986 fpu_dump_state(env
, f
, flags
);
27990 void mips_tcg_init(void)
27995 for (i
= 1; i
< 32; i
++)
27996 cpu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
27997 offsetof(CPUMIPSState
,
28000 #if defined(TARGET_MIPS64)
28001 cpu_gpr_hi
[0] = NULL
;
28003 for (unsigned i
= 1; i
< 32; i
++) {
28004 g_autofree
char *rname
= g_strdup_printf("%s[hi]", regnames
[i
]);
28006 cpu_gpr_hi
[i
] = tcg_global_mem_new_i64(cpu_env
,
28007 offsetof(CPUMIPSState
,
28008 active_tc
.gpr_hi
[i
]),
28011 #endif /* !TARGET_MIPS64 */
28012 for (i
= 0; i
< 32; i
++) {
28013 int off
= offsetof(CPUMIPSState
, active_fpu
.fpr
[i
].wr
.d
[0]);
28015 fpu_f64
[i
] = tcg_global_mem_new_i64(cpu_env
, off
, fregnames
[i
]);
28017 msa_translate_init();
28018 cpu_PC
= tcg_global_mem_new(cpu_env
,
28019 offsetof(CPUMIPSState
, active_tc
.PC
), "PC");
28020 for (i
= 0; i
< MIPS_DSP_ACC
; i
++) {
28021 cpu_HI
[i
] = tcg_global_mem_new(cpu_env
,
28022 offsetof(CPUMIPSState
, active_tc
.HI
[i
]),
28024 cpu_LO
[i
] = tcg_global_mem_new(cpu_env
,
28025 offsetof(CPUMIPSState
, active_tc
.LO
[i
]),
28028 cpu_dspctrl
= tcg_global_mem_new(cpu_env
,
28029 offsetof(CPUMIPSState
,
28030 active_tc
.DSPControl
),
28032 bcond
= tcg_global_mem_new(cpu_env
,
28033 offsetof(CPUMIPSState
, bcond
), "bcond");
28034 btarget
= tcg_global_mem_new(cpu_env
,
28035 offsetof(CPUMIPSState
, btarget
), "btarget");
28036 hflags
= tcg_global_mem_new_i32(cpu_env
,
28037 offsetof(CPUMIPSState
, hflags
), "hflags");
28039 fpu_fcr0
= tcg_global_mem_new_i32(cpu_env
,
28040 offsetof(CPUMIPSState
, active_fpu
.fcr0
),
28042 fpu_fcr31
= tcg_global_mem_new_i32(cpu_env
,
28043 offsetof(CPUMIPSState
, active_fpu
.fcr31
),
28045 cpu_lladdr
= tcg_global_mem_new(cpu_env
, offsetof(CPUMIPSState
, lladdr
),
28047 cpu_llval
= tcg_global_mem_new(cpu_env
, offsetof(CPUMIPSState
, llval
),
28050 #if !defined(TARGET_MIPS64)
28051 for (i
= 0; i
< NUMBER_OF_MXU_REGISTERS
- 1; i
++) {
28052 mxu_gpr
[i
] = tcg_global_mem_new(cpu_env
,
28053 offsetof(CPUMIPSState
,
28054 active_tc
.mxu_gpr
[i
]),
28058 mxu_CR
= tcg_global_mem_new(cpu_env
,
28059 offsetof(CPUMIPSState
, active_tc
.mxu_cr
),
28060 mxuregnames
[NUMBER_OF_MXU_REGISTERS
- 1]);
28061 #endif /* !TARGET_MIPS64 */
28064 void restore_state_to_opc(CPUMIPSState
*env
, TranslationBlock
*tb
,
28065 target_ulong
*data
)
28067 env
->active_tc
.PC
= data
[0];
28068 env
->hflags
&= ~MIPS_HFLAG_BMASK
;
28069 env
->hflags
|= data
[1];
28070 switch (env
->hflags
& MIPS_HFLAG_BMASK_BASE
) {
28071 case MIPS_HFLAG_BR
:
28073 case MIPS_HFLAG_BC
:
28074 case MIPS_HFLAG_BL
:
28076 env
->btarget
= data
[2];