/* -*- mode: C; c-basic-offset: 3; -*- */

/*--------------------------------------------------------------------*/
/*--- MemCheck: some non-generic asm implementations of mc_main.c  ---*/
/*--- functions                                                    ---*/
/*---                                                mc_main_asm.c ---*/
/*--------------------------------------------------------------------*/

/*
   This file is part of MemCheck, a heavyweight Valgrind tool for
   detecting memory errors.

   Copyright (C) 2000-2018 Julian Seward
      jseward@acm.org

   This program is free software; you can redistribute it and/or
   modify it under the terms of the GNU General Public License as
   published by the Free Software Foundation; either version 2 of the
   License, or (at your option) any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
   02111-1307, USA.

   The GNU General Public License is contained in the file COPYING.
*/

/* Having these in mc_main.c gives undefined references at link time,
   when compiling with lto. Having them in a separate file solves this.
   Also, for some toolchains, we might need to disable lto. */

// A bunch of includes needed only by mc_include.h
#include "pub_tool_basics.h"
#include "pub_tool_poolalloc.h"
#include "pub_tool_hashtable.h"
#include "pub_tool_tooliface.h"

#include "mc_include.h"

// Non-generic assembly for arm32-linux
#if ENABLE_ASSEMBLY_HELPERS && defined(PERF_FAST_LOADV) \
    && defined(VGP_arm_linux)
__asm__( /* Derived from the 32 bit assembly helper */
".text                                  \n"
".align 2                               \n"
".global vgMemCheck_helperc_LOADV64le   \n"
".type   vgMemCheck_helperc_LOADV64le, %function \n"
"vgMemCheck_helperc_LOADV64le:          \n"
"      tst    r0, #7                    \n"
"      movw   r3, #:lower16:primary_map \n"
"      bne    .LLV64LEc4                \n" // if misaligned
"      lsr    r2, r0, #16               \n"
"      movt   r3, #:upper16:primary_map \n"
"      ldr    r2, [r3, r2, lsl #2]      \n"
"      uxth   r1, r0                    \n" // r1 is 0-(16)-0 X-(13)-X 000
"      movw   r3, #0xAAAA               \n"
"      lsr    r1, r1, #2                \n" // r1 is 0-(16)-0 00 X-(13)-X 0
"      ldrh   r1, [r2, r1]              \n"
"      cmp    r1, r3                    \n" // 0xAAAA == VA_BITS16_DEFINED
"      bne    .LLV64LEc0                \n" // if !all_defined
"      mov    r1, #0x0                  \n" // 0x0 == V_BITS32_DEFINED
"      mov    r0, #0x0                  \n" // 0x0 == V_BITS32_DEFINED
"      bx     lr                        \n"
".LLV64LEc0:                            \n"
"      movw   r3, #0x5555               \n"
"      cmp    r1, r3                    \n" // 0x5555 == VA_BITS16_UNDEFINED
"      bne    .LLV64LEc4                \n" // if !all_undefined
"      mov    r1, #0xFFFFFFFF           \n" // 0xFFFFFFFF == V_BITS32_UNDEFINED
"      mov    r0, #0xFFFFFFFF           \n" // 0xFFFFFFFF == V_BITS32_UNDEFINED
"      bx     lr                        \n"
".LLV64LEc4:                            \n"
"      push   {r4, lr}                  \n"
"      mov    r2, #0                    \n"
"      mov    r1, #64                   \n"
"      bl     mc_LOADVn_slow            \n"
"      pop    {r4, pc}                  \n"
".size vgMemCheck_helperc_LOADV64le, .-vgMemCheck_helperc_LOADV64le \n"
".previous\n"
);

#elif ENABLE_ASSEMBLY_HELPERS && defined(PERF_FAST_LOADV) \
      && (defined(VGP_x86_linux) || defined(VGP_x86_solaris))
__asm__(
".text\n"
".align 16\n"
".global vgMemCheck_helperc_LOADV64le\n"
".type   vgMemCheck_helperc_LOADV64le, @function\n"
"vgMemCheck_helperc_LOADV64le:\n"
"      test   $0x7, %eax\n"
"      jne    .LLV64LE2\n"          /* jump if not aligned */
"      mov    %eax, %ecx\n"
"      movzwl %ax, %edx\n"
"      shr    $0x10, %ecx\n"
"      mov    primary_map(,%ecx,4), %ecx\n"
"      shr    $0x3, %edx\n"
"      movzwl (%ecx,%edx,2), %edx\n"
"      cmp    $0xaaaa, %edx\n"
"      jne    .LLV64LE1\n"          /* jump if not all defined */
"      xor    %eax, %eax\n"         /* return 0 in edx:eax */
"      xor    %edx, %edx\n"
"      ret\n"
".LLV64LE1:\n"
"      cmp    $0x5555, %edx\n"
"      jne    .LLV64LE2\n"          /* jump if not all undefined */
"      or     $0xffffffff, %eax\n"  /* else return all bits set in edx:eax */
"      or     $0xffffffff, %edx\n"
"      ret\n"
".LLV64LE2:\n"
"      xor    %ecx, %ecx\n"         /* tail call to mc_LOADVn_slow(a, 64, 0) */
"      mov    $64, %edx\n"
"      jmp    mc_LOADVn_slow\n"
".size vgMemCheck_helperc_LOADV64le, .-vgMemCheck_helperc_LOADV64le\n"
".previous\n"
);

#else
// Generic for all platforms except {arm32,x86}-linux and x86-solaris
// is in mc_main.c
#endif
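
/* For reference, both helpers above implement the same fast path, which
   the generic C version in mc_main.c expresses roughly as follows.  This
   is a simplified sketch, not the exact mc_main.c code: the function name
   is hypothetical, it relies on the memcheck shadow-memory definitions
   (primary_map, SecMap, vabits8, VA_BITS16_DEFINED/UNDEFINED,
   V_BITS64_DEFINED/UNDEFINED, mc_LOADVn_slow), and, like the assembly, it
   skips the high-address check that 64-bit targets need (on these 32-bit
   targets the primary map covers the whole address space).

      ULong sketch_LOADV64le ( Addr a )   // hypothetical name
      {
         // Misaligned loads go straight to the slow path.
         if (a & 7)
            return mc_LOADVn_slow( a, 64, False );  // False == little-endian

         // primary_map[a >> 16] is the SecMap covering this 64KB chunk.
         // Each vabits8 byte holds VA bits for 4 data bytes, so an
         // aligned 8-byte load is covered by one 16-bit read at index
         // (a & 0xFFFF) >> 3.
         SecMap* sm       = primary_map[a >> 16];
         UShort  vabits16 = ((UShort*)sm->vabits8)[(a & 0xFFFF) >> 3];

         if (vabits16 == VA_BITS16_DEFINED)     // 0xAAAA
            return V_BITS64_DEFINED;            // all 8 bytes defined
         if (vabits16 == VA_BITS16_UNDEFINED)   // 0x5555
            return V_BITS64_UNDEFINED;          // all 8 bytes undefined

         // Partially defined or unaddressable: defer to the slow path.
         return mc_LOADVn_slow( a, 64, False );
      }
*/
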
// Non-generic assembly for arm32-linux
#if ENABLE_ASSEMBLY_HELPERS && defined(PERF_FAST_LOADV) \
    && defined(VGP_arm_linux)
__asm__( /* Derived from NCode template */
".text                                  \n"
".align 2                               \n"
".global vgMemCheck_helperc_LOADV32le   \n"
".type   vgMemCheck_helperc_LOADV32le, %function \n"
"vgMemCheck_helperc_LOADV32le:          \n"
"      tst    r0, #3                    \n" // 1
"      movw   r3, #:lower16:primary_map \n" // 1
"      bne    .LLV32LEc4                \n" // 2  if misaligned
"      lsr    r2, r0, #16               \n" // 3
"      movt   r3, #:upper16:primary_map \n" // 3
"      ldr    r2, [r3, r2, lsl #2]      \n" // 4
"      uxth   r1, r0                    \n" // 4
"      ldrb   r1, [r2, r1, lsr #2]      \n" // 5
"      cmp    r1, #0xAA                 \n" // 6  0xAA == VA_BITS8_DEFINED
"      bne    .LLV32LEc0                \n" // 7  if !all_defined
"      mov    r0, #0x0                  \n" // 8  0x0 == V_BITS32_DEFINED
"      bx     lr                        \n" // 9
".LLV32LEc0:                            \n"
"      cmp    r1, #0x55                 \n" // 0x55 == VA_BITS8_UNDEFINED
"      bne    .LLV32LEc4                \n" // if !all_undefined
"      mov    r0, #0xFFFFFFFF           \n" // 0xFFFFFFFF == V_BITS32_UNDEFINED
"      bx     lr                        \n"
".LLV32LEc4:                            \n"
"      push   {r4, lr}                  \n"
"      mov    r2, #0                    \n"
"      mov    r1, #32                   \n"
"      bl     mc_LOADVn_slow            \n"
"      pop    {r4, pc}                  \n"
".size vgMemCheck_helperc_LOADV32le, .-vgMemCheck_helperc_LOADV32le \n"
".previous\n"
);

#elif ENABLE_ASSEMBLY_HELPERS && defined(PERF_FAST_LOADV) \
      && (defined(VGP_x86_linux) || defined(VGP_x86_solaris))
__asm__(
".text\n"
".align 16\n"
".global vgMemCheck_helperc_LOADV32le\n"
".type   vgMemCheck_helperc_LOADV32le, @function\n"
"vgMemCheck_helperc_LOADV32le:\n"
"      test   $0x3, %eax\n"
"      jnz    .LLV32LE2\n"         /* jump if misaligned */
"      mov    %eax, %edx\n"
"      shr    $16, %edx\n"
"      mov    primary_map(,%edx,4), %ecx\n"
"      movzwl %ax, %edx\n"
"      shr    $2, %edx\n"
"      movzbl (%ecx,%edx,1), %edx\n"
"      cmp    $0xaa, %edx\n"       /* compare to VA_BITS8_DEFINED */
"      jne    .LLV32LE1\n"         /* jump if not completely defined */
"      xor    %eax, %eax\n"        /* else return V_BITS32_DEFINED */
"      ret\n"
".LLV32LE1:\n"
"      cmp    $0x55, %edx\n"       /* compare to VA_BITS8_UNDEFINED */
"      jne    .LLV32LE2\n"         /* jump if not completely undefined */
"      or     $0xffffffff, %eax\n" /* else return V_BITS32_UNDEFINED */
"      ret\n"
".LLV32LE2:\n"
"      xor    %ecx, %ecx\n"        /* tail call mc_LOADVn_slow(a, 32, 0) */
"      mov    $32, %edx\n"
"      jmp    mc_LOADVn_slow\n"
".size vgMemCheck_helperc_LOADV32le, .-vgMemCheck_helperc_LOADV32le\n"
".previous\n"
);

#else
// Generic for all platforms except {arm32,x86}-linux and x86-solaris
// is in mc_main.c
#endif
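
/* Likewise, a simplified sketch of the 32-bit fast path above, under the
   same assumptions as the 64-bit sketch (hypothetical function name;
   memcheck shadow-memory definitions from mc_include.h/mc_main.c).  An
   aligned 4-byte load is covered by exactly one vabits8 byte, so a single
   byte compare decides both common cases.

      UWord sketch_LOADV32le ( Addr a )   // hypothetical name
      {
         if (a & 3)
            return (UWord)mc_LOADVn_slow( a, 32, False );  // misaligned

         SecMap* sm      = primary_map[a >> 16];
         UChar   vabits8 = sm->vabits8[(a & 0xFFFF) >> 2];

         if (vabits8 == VA_BITS8_DEFINED)      // 0xAA
            return V_BITS32_DEFINED;           // all 4 bytes defined
         if (vabits8 == VA_BITS8_UNDEFINED)    // 0x55
            return V_BITS32_UNDEFINED;         // all 4 bytes undefined

         return (UWord)mc_LOADVn_slow( a, 32, False );     // mixed case
      }
*/
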
/*--------------------------------------------------------------------*/
/*--- end                                                          ---*/
/*--------------------------------------------------------------------*/