/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * A small micro-assembler. It is intentionally kept simple, supports only
 * a subset of instructions, and does not try to hide pipeline effects
 * like branch delay slots.
 *
 * Copyright (C) 2004, 2005, 2006, 2008 Thiemo Seufer
 * Copyright (C) 2005, 2007 Maciej W. Rozycki
 * Copyright (C) 2006 Ralf Baechle (ralf@linux-mips.org)
 */
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/init.h>

#include <asm/inst.h>
#include <asm/elf.h>
#include <asm/bugs.h>
#include <asm/uasm.h>
#define IMM_MASK	0xffff
#define JIMM_MASK	0x3ffffff
#define FUNC_MASK	0x3f
#define SCIMM_MASK	0xfffff
enum opcode {
	insn_invalid,
	insn_addu, insn_addiu, insn_and, insn_andi, insn_beq,
	insn_beql, insn_bgez, insn_bgezl, insn_bltz, insn_bltzl,
	insn_bne, insn_cache, insn_daddu, insn_daddiu, insn_dmfc0,
	insn_dmtc0, insn_dsll, insn_dsll32, insn_dsra, insn_dsrl,
	insn_dsrl32, insn_drotr, insn_drotr32, insn_dsubu, insn_eret,
	insn_j, insn_jal, insn_jr, insn_ld, insn_ll, insn_lld,
	insn_lui, insn_lw, insn_mfc0, insn_mtc0, insn_or, insn_ori,
	insn_pref, insn_rfe, insn_sc, insn_scd, insn_sd, insn_sll,
	insn_sra, insn_srl, insn_rotr, insn_subu, insn_sw, insn_tlbp,
	insn_tlbr, insn_tlbwi, insn_tlbwr, insn_xor, insn_xori,
	insn_dins, insn_dinsm, insn_syscall, insn_bbit0, insn_bbit1,
	insn_lwx, insn_ldx
};
struct insn {
	enum opcode opcode;
	u32 match;
	enum fields fields;
};

/* This macro sets the non-variable bits of an instruction. */
#define M(a, b, c, d, e, f)				\
	((a) << OP_SH					\
	 | (b) << RS_SH					\
	 | (c) << RT_SH					\
	 | (d) << RD_SH					\
	 | (e) << RE_SH					\
	 | (f) << FUNC_SH)
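/*
 * Each entry in the table below pairs the fixed bits produced by M() with
 * a mask of the variable fields.  For example, the insn_addu entry uses
 * M(spec_op, 0, 0, 0, 0, addu_op) for the major opcode and function code,
 * while RS | RT | RD tells build_insn() which register fields to fill in
 * at emit time.
 */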
static struct insn insn_table[] __uasminitdata = {
	{ insn_addiu, M(addiu_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_addu, M(spec_op, 0, 0, 0, 0, addu_op), RS | RT | RD },
	{ insn_and, M(spec_op, 0, 0, 0, 0, and_op), RS | RT | RD },
	{ insn_andi, M(andi_op, 0, 0, 0, 0, 0), RS | RT | UIMM },
	{ insn_beq, M(beq_op, 0, 0, 0, 0, 0), RS | RT | BIMM },
	{ insn_beql, M(beql_op, 0, 0, 0, 0, 0), RS | RT | BIMM },
	{ insn_bgez, M(bcond_op, 0, bgez_op, 0, 0, 0), RS | BIMM },
	{ insn_bgezl, M(bcond_op, 0, bgezl_op, 0, 0, 0), RS | BIMM },
	{ insn_bltz, M(bcond_op, 0, bltz_op, 0, 0, 0), RS | BIMM },
	{ insn_bltzl, M(bcond_op, 0, bltzl_op, 0, 0, 0), RS | BIMM },
	{ insn_bne, M(bne_op, 0, 0, 0, 0, 0), RS | RT | BIMM },
	{ insn_cache, M(cache_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_daddiu, M(daddiu_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_daddu, M(spec_op, 0, 0, 0, 0, daddu_op), RS | RT | RD },
	{ insn_dmfc0, M(cop0_op, dmfc_op, 0, 0, 0, 0), RT | RD | SET },
	{ insn_dmtc0, M(cop0_op, dmtc_op, 0, 0, 0, 0), RT | RD | SET },
	{ insn_dsll, M(spec_op, 0, 0, 0, 0, dsll_op), RT | RD | RE },
	{ insn_dsll32, M(spec_op, 0, 0, 0, 0, dsll32_op), RT | RD | RE },
	{ insn_dsra, M(spec_op, 0, 0, 0, 0, dsra_op), RT | RD | RE },
	{ insn_dsrl, M(spec_op, 0, 0, 0, 0, dsrl_op), RT | RD | RE },
	{ insn_dsrl32, M(spec_op, 0, 0, 0, 0, dsrl32_op), RT | RD | RE },
	{ insn_drotr, M(spec_op, 1, 0, 0, 0, dsrl_op), RT | RD | RE },
	{ insn_drotr32, M(spec_op, 1, 0, 0, 0, dsrl32_op), RT | RD | RE },
	{ insn_dsubu, M(spec_op, 0, 0, 0, 0, dsubu_op), RS | RT | RD },
	{ insn_eret, M(cop0_op, cop_op, 0, 0, 0, eret_op), 0 },
	{ insn_j, M(j_op, 0, 0, 0, 0, 0), JIMM },
	{ insn_jal, M(jal_op, 0, 0, 0, 0, 0), JIMM },
	{ insn_jr, M(spec_op, 0, 0, 0, 0, jr_op), RS },
	{ insn_ld, M(ld_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_ll, M(ll_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_lld, M(lld_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_lui, M(lui_op, 0, 0, 0, 0, 0), RT | SIMM },
	{ insn_lw, M(lw_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_mfc0, M(cop0_op, mfc_op, 0, 0, 0, 0), RT | RD | SET },
	{ insn_mtc0, M(cop0_op, mtc_op, 0, 0, 0, 0), RT | RD | SET },
	{ insn_or, M(spec_op, 0, 0, 0, 0, or_op), RS | RT | RD },
	{ insn_ori, M(ori_op, 0, 0, 0, 0, 0), RS | RT | UIMM },
	{ insn_pref, M(pref_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_rfe, M(cop0_op, cop_op, 0, 0, 0, rfe_op), 0 },
	{ insn_sc, M(sc_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_scd, M(scd_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_sd, M(sd_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_sll, M(spec_op, 0, 0, 0, 0, sll_op), RT | RD | RE },
	{ insn_sra, M(spec_op, 0, 0, 0, 0, sra_op), RT | RD | RE },
	{ insn_srl, M(spec_op, 0, 0, 0, 0, srl_op), RT | RD | RE },
	{ insn_rotr, M(spec_op, 1, 0, 0, 0, srl_op), RT | RD | RE },
	{ insn_subu, M(spec_op, 0, 0, 0, 0, subu_op), RS | RT | RD },
	{ insn_sw, M(sw_op, 0, 0, 0, 0, 0), RS | RT | SIMM },
	{ insn_tlbp, M(cop0_op, cop_op, 0, 0, 0, tlbp_op), 0 },
	{ insn_tlbr, M(cop0_op, cop_op, 0, 0, 0, tlbr_op), 0 },
	{ insn_tlbwi, M(cop0_op, cop_op, 0, 0, 0, tlbwi_op), 0 },
	{ insn_tlbwr, M(cop0_op, cop_op, 0, 0, 0, tlbwr_op), 0 },
	{ insn_xor, M(spec_op, 0, 0, 0, 0, xor_op), RS | RT | RD },
	{ insn_xori, M(xori_op, 0, 0, 0, 0, 0), RS | RT | UIMM },
	{ insn_dins, M(spec3_op, 0, 0, 0, 0, dins_op), RS | RT | RD | RE },
	{ insn_dinsm, M(spec3_op, 0, 0, 0, 0, dinsm_op), RS | RT | RD | RE },
	{ insn_syscall, M(spec_op, 0, 0, 0, 0, syscall_op), SCIMM },
	{ insn_bbit0, M(lwc2_op, 0, 0, 0, 0, 0), RS | RT | BIMM },
	{ insn_bbit1, M(swc2_op, 0, 0, 0, 0, 0), RS | RT | BIMM },
	{ insn_lwx, M(spec3_op, 0, 0, 0, lwx_op, lx_op), RS | RT | RD },
	{ insn_ldx, M(spec3_op, 0, 0, 0, ldx_op, lx_op), RS | RT | RD },
	{ insn_invalid, 0, 0 }
};
static inline __uasminit u32 build_rs(u32 arg)
{
	WARN(arg & ~RS_MASK, KERN_WARNING "Micro-assembler field overflow\n");

	return (arg & RS_MASK) << RS_SH;
}

static inline __uasminit u32 build_rt(u32 arg)
{
	WARN(arg & ~RT_MASK, KERN_WARNING "Micro-assembler field overflow\n");

	return (arg & RT_MASK) << RT_SH;
}

static inline __uasminit u32 build_rd(u32 arg)
{
	WARN(arg & ~RD_MASK, KERN_WARNING "Micro-assembler field overflow\n");

	return (arg & RD_MASK) << RD_SH;
}

static inline __uasminit u32 build_re(u32 arg)
{
	WARN(arg & ~RE_MASK, KERN_WARNING "Micro-assembler field overflow\n");

	return (arg & RE_MASK) << RE_SH;
}
static inline __uasminit u32 build_simm(s32 arg)
{
	WARN(arg > 0x7fff || arg < -0x8000,
	     KERN_WARNING "Micro-assembler field overflow\n");

	return arg & 0xffff;
}
static inline __uasminit u32 build_uimm(u32 arg)
{
	WARN(arg & ~IMM_MASK, KERN_WARNING "Micro-assembler field overflow\n");

	return arg & IMM_MASK;
}
static inline __uasminit u32 build_bimm(s32 arg)
{
	WARN(arg > 0x1ffff || arg < -0x20000,
	     KERN_WARNING "Micro-assembler field overflow\n");

	WARN(arg & 0x3, KERN_WARNING "Invalid micro-assembler branch target\n");

	return ((arg < 0) ? (1 << 15) : 0) | ((arg >> 2) & 0x7fff);
}
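/*
 * Example: __resolve_relocs() below passes the byte distance measured from
 * the instruction after the branch.  A branch whose target is that very
 * next instruction has arg == 0 and encodes offset 0x0000; a branch back
 * to the instruction preceding the branch has arg == -8 and encodes 0xfffe.
 */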
static inline __uasminit u32 build_jimm(u32 arg)
{
	WARN(arg & ~(JIMM_MASK << 2),
	     KERN_WARNING "Micro-assembler field overflow\n");

	return (arg >> 2) & JIMM_MASK;
}

static inline __uasminit u32 build_scimm(u32 arg)
{
	WARN(arg & ~SCIMM_MASK,
	     KERN_WARNING "Micro-assembler field overflow\n");

	return (arg & SCIMM_MASK) << SCIMM_SH;
}

static inline __uasminit u32 build_func(u32 arg)
{
	WARN(arg & ~FUNC_MASK, KERN_WARNING "Micro-assembler field overflow\n");

	return arg & FUNC_MASK;
}

static inline __uasminit u32 build_set(u32 arg)
{
	WARN(arg & ~SET_MASK, KERN_WARNING "Micro-assembler field overflow\n");

	return arg & SET_MASK;
}
/*
 * The order of opcode arguments is implicitly left to right,
 * starting with RS and ending with FUNC or IMM.
 */
static void __uasminit build_insn(u32 **buf, enum opcode opc, ...)
{
	struct insn *ip = NULL;
	unsigned int i;
	va_list ap;
	u32 op;

	for (i = 0; insn_table[i].opcode != insn_invalid; i++)
		if (insn_table[i].opcode == opc) {
			ip = &insn_table[i];
			break;
		}

	if (!ip || (opc == insn_daddiu && r4k_daddiu_bug()))
		panic("Unsupported Micro-assembler instruction %d", opc);

	op = ip->match;
	va_start(ap, opc);
	if (ip->fields & RS)
		op |= build_rs(va_arg(ap, u32));
	if (ip->fields & RT)
		op |= build_rt(va_arg(ap, u32));
	if (ip->fields & RD)
		op |= build_rd(va_arg(ap, u32));
	if (ip->fields & RE)
		op |= build_re(va_arg(ap, u32));
	if (ip->fields & SIMM)
		op |= build_simm(va_arg(ap, s32));
	if (ip->fields & UIMM)
		op |= build_uimm(va_arg(ap, u32));
	if (ip->fields & BIMM)
		op |= build_bimm(va_arg(ap, s32));
	if (ip->fields & JIMM)
		op |= build_jimm(va_arg(ap, u32));
	if (ip->fields & FUNC)
		op |= build_func(va_arg(ap, u32));
	if (ip->fields & SET)
		op |= build_set(va_arg(ap, u32));
	if (ip->fields & SCIMM)
		op |= build_scimm(va_arg(ap, u32));
	va_end(ap);

	**buf = op;
	(*buf)++;
}
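/*
 * Worked example: for "addiu $4, $5, 8" the table supplies the fixed bits
 * M(addiu_op, 0, 0, 0, 0, 0) and build_insn() ORs in
 * build_rs(5) | build_rt(4) | build_simm(8) - i.e. 5 << 21, 4 << 16 and
 * the low 16 immediate bits - giving the instruction word 0x24a40008.
 */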
#define I_u1u2u3(op)					\
Ip_u1u2u3(op)						\
{							\
	build_insn(buf, insn##op, a, b, c);		\
}							\
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u2u1u3(op)					\
Ip_u2u1u3(op)						\
{							\
	build_insn(buf, insn##op, b, a, c);		\
}							\
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u3u1u2(op)					\
Ip_u3u1u2(op)						\
{							\
	build_insn(buf, insn##op, b, c, a);		\
}							\
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u1u2s3(op)					\
Ip_u1u2s3(op)						\
{							\
	build_insn(buf, insn##op, a, b, c);		\
}							\
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u2s3u1(op)					\
Ip_u2s3u1(op)						\
{							\
	build_insn(buf, insn##op, c, a, b);		\
}							\
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u2u1s3(op)					\
Ip_u2u1s3(op)						\
{							\
	build_insn(buf, insn##op, b, a, c);		\
}							\
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u2u1msbu3(op)					\
Ip_u2u1msbu3(op)					\
{							\
	build_insn(buf, insn##op, b, a, c+d-1, c);	\
}							\
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u2u1msb32u3(op)				\
Ip_u2u1msb32u3(op)					\
{							\
	build_insn(buf, insn##op, b, a, c+d-33, c);	\
}							\
UASM_EXPORT_SYMBOL(uasm_i##op);
#define I_u1u2(op)					\
Ip_u1u2(op)						\
{							\
	build_insn(buf, insn##op, a, b);		\
}							\
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u1s2(op)					\
Ip_u1s2(op)						\
{							\
	build_insn(buf, insn##op, a, b);		\
}							\
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_u1(op)					\
Ip_u1(op)						\
{							\
	build_insn(buf, insn##op, a);			\
}							\
UASM_EXPORT_SYMBOL(uasm_i##op);

#define I_0(op)						\
Ip_0(op)						\
{							\
	build_insn(buf, insn##op);			\
}							\
UASM_EXPORT_SYMBOL(uasm_i##op);
I_u2u1msb32u3(_dinsm);
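/*
 * An I_*() invocation expands, via the matching Ip_*() prototype macro
 * from <asm/uasm.h>, into an exported uasm_i_*() emitter.  For instance,
 * I_u2u1msb32u3(_dinsm) defines uasm_i_dinsm(); its c+d-33 expression
 * turns a (pos, size) argument pair into the msb value that DINSM carries
 * in the rd field, while the lsb goes into the re (shift-amount) field.
 */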
#ifdef CONFIG_CPU_CAVIUM_OCTEON
#include <asm/octeon/octeon.h>
void __uasminit uasm_i_pref(u32 **buf, unsigned int a, signed int b,
			    unsigned int c)
{
	if (OCTEON_IS_MODEL(OCTEON_CN63XX_PASS1_X) && a <= 24 && a != 5)
		/*
		 * As per erratum Core-14449, replace prefetches 0-4,
		 * 6-24 with 'pref 28'.
		 */
		build_insn(buf, insn_pref, c, 28, b);
	else
		build_insn(buf, insn_pref, c, a, b);
}
UASM_EXPORT_SYMBOL(uasm_i_pref);
#else
I_u2s3u1(_pref)
#endif
void __uasminit uasm_build_label(struct uasm_label **lab, u32 *addr, int lid)
{
	(*lab)->addr = addr;
	(*lab)->lab = lid;
	(*lab)++;
}
UASM_EXPORT_SYMBOL(uasm_build_label);
int __uasminit uasm_in_compat_space_p(long addr)
{
	/* Is this address in 32bit compat space? */
#ifdef CONFIG_64BIT
	return (((addr) & 0xffffffff00000000L) == 0xffffffff00000000L);
#else
	return 1;
#endif
}
UASM_EXPORT_SYMBOL(uasm_in_compat_space_p);
static int __uasminit uasm_rel_highest(long val)
{
#ifdef CONFIG_64BIT
	return ((((val + 0x800080008000L) >> 48) & 0xffff) ^ 0x8000) - 0x8000;
#else
	return 0;
#endif
}

static int __uasminit uasm_rel_higher(long val)
{
#ifdef CONFIG_64BIT
	return ((((val + 0x80008000L) >> 32) & 0xffff) ^ 0x8000) - 0x8000;
#else
	return 0;
#endif
}
int __uasminit uasm_rel_hi(long val)
{
	return ((((val + 0x8000L) >> 16) & 0xffff) ^ 0x8000) - 0x8000;
}
UASM_EXPORT_SYMBOL(uasm_rel_hi);

int __uasminit uasm_rel_lo(long val)
{
	return ((val & 0xffff) ^ 0x8000) - 0x8000;
}
UASM_EXPORT_SYMBOL(uasm_rel_lo);
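/*
 * Example: for addr == 0x1234abcd, uasm_rel_hi() returns 0x1235 and
 * uasm_rel_lo() returns -0x5433, because the low half is negative when
 * sign-extended; "lui reg, 0x1235" followed by "addiu reg, reg, -0x5433"
 * reconstructs 0x1234abcd.
 */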
void __uasminit UASM_i_LA_mostly(u32 **buf, unsigned int rs, long addr)
{
	if (!uasm_in_compat_space_p(addr)) {
		uasm_i_lui(buf, rs, uasm_rel_highest(addr));
		if (uasm_rel_higher(addr))
			uasm_i_daddiu(buf, rs, rs, uasm_rel_higher(addr));
		if (uasm_rel_hi(addr)) {
			uasm_i_dsll(buf, rs, rs, 16);
			uasm_i_daddiu(buf, rs, rs, uasm_rel_hi(addr));
			uasm_i_dsll(buf, rs, rs, 16);
		} else
			uasm_i_dsll32(buf, rs, rs, 0);
	} else
		uasm_i_lui(buf, rs, uasm_rel_hi(addr));
}
UASM_EXPORT_SYMBOL(UASM_i_LA_mostly);
void __uasminit UASM_i_LA(u32 **buf, unsigned int rs, long addr)
{
	UASM_i_LA_mostly(buf, rs, addr);
	if (uasm_rel_lo(addr)) {
		if (!uasm_in_compat_space_p(addr))
			uasm_i_daddiu(buf, rs, rs, uasm_rel_lo(addr));
		else
			uasm_i_addiu(buf, rs, rs, uasm_rel_lo(addr));
	}
}
UASM_EXPORT_SYMBOL(UASM_i_LA);
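/*
 * Example: UASM_i_LA(&p, reg, 0xffffffff80001234L) sees a compat-space
 * address and emits "lui reg, 0x8000; addiu reg, reg, 0x1234", which
 * sign-extends back to the original value on a 64-bit CPU.
 */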
/* Handle relocations. */
void __uasminit
uasm_r_mips_pc16(struct uasm_reloc **rel, u32 *addr, int lid)
{
	(*rel)->addr = addr;
	(*rel)->type = R_MIPS_PC16;
	(*rel)->lab = lid;
	(*rel)++;
}
UASM_EXPORT_SYMBOL(uasm_r_mips_pc16);
static inline void __uasminit
__resolve_relocs(struct uasm_reloc *rel, struct uasm_label *lab)
{
	long laddr = (long)lab->addr;
	long raddr = (long)rel->addr;

	switch (rel->type) {
	case R_MIPS_PC16:
		*rel->addr |= build_bimm(laddr - (raddr + 4));
		break;

	default:
		panic("Unsupported Micro-assembler relocation %d",
		      rel->type);
	}
}
void __uasminit
uasm_resolve_relocs(struct uasm_reloc *rel, struct uasm_label *lab)
{
	struct uasm_label *l;

	for (; rel->lab != UASM_LABEL_INVALID; rel++)
		for (l = lab; l->lab != UASM_LABEL_INVALID; l++)
			if (rel->lab == l->lab)
				__resolve_relocs(rel, l);
}
UASM_EXPORT_SYMBOL(uasm_resolve_relocs);
void __uasminit
uasm_move_relocs(struct uasm_reloc *rel, u32 *first, u32 *end, long off)
{
	for (; rel->lab != UASM_LABEL_INVALID; rel++)
		if (rel->addr >= first && rel->addr < end)
			rel->addr += off;
}
UASM_EXPORT_SYMBOL(uasm_move_relocs);
void __uasminit
uasm_move_labels(struct uasm_label *lab, u32 *first, u32 *end, long off)
{
	for (; lab->lab != UASM_LABEL_INVALID; lab++)
		if (lab->addr >= first && lab->addr < end)
			lab->addr += off;
}
UASM_EXPORT_SYMBOL(uasm_move_labels);
void __uasminit
uasm_copy_handler(struct uasm_reloc *rel, struct uasm_label *lab, u32 *first,
		  u32 *end, u32 *target)
{
	long off = (long)(target - first);

	memcpy(target, first, (end - first) * sizeof(u32));

	uasm_move_relocs(rel, first, end, off);
	uasm_move_labels(lab, first, end, off);
}
UASM_EXPORT_SYMBOL(uasm_copy_handler);
int __uasminit uasm_insn_has_bdelay(struct uasm_reloc *rel, u32 *addr)
{
	for (; rel->lab != UASM_LABEL_INVALID; rel++) {
		if (rel->addr == addr
		    && (rel->type == R_MIPS_PC16
			|| rel->type == R_MIPS_26))
			return 1;
	}

	return 0;
}
UASM_EXPORT_SYMBOL(uasm_insn_has_bdelay);
/* Convenience functions for labeled branches. */
void __uasminit
uasm_il_bltz(u32 **p, struct uasm_reloc **r, unsigned int reg, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_bltz(p, reg, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_bltz);

void __uasminit
uasm_il_b(u32 **p, struct uasm_reloc **r, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_b(p, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_b);

void __uasminit
uasm_il_beqz(u32 **p, struct uasm_reloc **r, unsigned int reg, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_beqz(p, reg, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_beqz);

void __uasminit
uasm_il_beqzl(u32 **p, struct uasm_reloc **r, unsigned int reg, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_beqzl(p, reg, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_beqzl);

void __uasminit
uasm_il_bne(u32 **p, struct uasm_reloc **r, unsigned int reg1,
	    unsigned int reg2, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_bne(p, reg1, reg2, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_bne);

void __uasminit
uasm_il_bnez(u32 **p, struct uasm_reloc **r, unsigned int reg, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_bnez(p, reg, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_bnez);

void __uasminit
uasm_il_bgezl(u32 **p, struct uasm_reloc **r, unsigned int reg, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_bgezl(p, reg, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_bgezl);

void __uasminit
uasm_il_bgez(u32 **p, struct uasm_reloc **r, unsigned int reg, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_bgez(p, reg, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_bgez);

void __uasminit
uasm_il_bbit0(u32 **p, struct uasm_reloc **r, unsigned int reg,
	      unsigned int bit, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_bbit0(p, reg, bit, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_bbit0);

void __uasminit
uasm_il_bbit1(u32 **p, struct uasm_reloc **r, unsigned int reg,
	      unsigned int bit, int lid)
{
	uasm_r_mips_pc16(r, *p, lid);
	uasm_i_bbit1(p, reg, bit, 0);
}
UASM_EXPORT_SYMBOL(uasm_il_bbit1);
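/*
 * Usage sketch: how a generator (e.g. a TLB handler builder) typically
 * drives the API above.  The buffer sizes, label id and register number
 * below are made up for the example.
 */
#if 0
static u32 example_buf[32];
static struct uasm_label example_labels[8];	/* zeroed: UASM_LABEL_INVALID terminates */
static struct uasm_reloc example_relocs[8];

static void __uasminit example_build_stub(void)
{
	struct uasm_label *l = example_labels;
	struct uasm_reloc *r = example_relocs;
	u32 *p = example_buf;
	const int label_loop = 1;		/* arbitrary label id */

	uasm_build_label(&l, p, label_loop);	/* mark the loop head */
	uasm_i_addiu(&p, 4, 4, -1);		/* addiu $4, $4, -1 */
	uasm_il_bnez(&p, &r, 4, label_loop);	/* branch back while $4 != 0 */
	uasm_i_nop(&p);				/* fill the branch delay slot */

	/* Patch the recorded R_MIPS_PC16 branch against its label. */
	uasm_resolve_relocs(example_relocs, example_labels);
}
#endif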