Roll skia to r1241.
[chromium-blink-merge.git] / tools / memory_watcher / preamble_patcher_with_stub.cc
blobdc388720143552cf4bc616a29694868cead89b37
/* Copyright (c) 2007, Google Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 *     * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above
 * copyright notice, this list of conditions and the following disclaimer
 * in the documentation and/or other materials provided with the
 * distribution.
 *     * Neither the name of Google Inc. nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * ---
 * Implementation of PreamblePatcher
 */
35 #include "preamble_patcher.h"
37 #include "mini_disassembler.h"
// Definitions of assembly statements we need.
// These are x86 opcode bytes written directly into patched code, so they
// must stay preprocessor constants usable in byte contexts.
#define ASM_JMP32REL 0xE9  // jmp rel32: near jump, 32-bit relative displacement
#define ASM_INT3 0xCC      // int3: one-byte breakpoint instruction
43 namespace sidestep {
45 SideStepError PreamblePatcher::RawPatchWithStub(
46 void* target_function,
47 void *replacement_function,
48 unsigned char* preamble_stub,
49 unsigned long stub_size,
50 unsigned long* bytes_needed) {
51 if ((NULL == target_function) ||
52 (NULL == replacement_function) ||
53 (NULL == preamble_stub)) {
54 ASSERT(false, "Invalid parameters - either pTargetFunction or "
55 "pReplacementFunction or pPreambleStub were NULL.");
56 return SIDESTEP_INVALID_PARAMETER;
59 // TODO(V7:joi) Siggi and I just had a discussion and decided that both
60 // patching and unpatching are actually unsafe. We also discussed a
61 // method of making it safe, which is to freeze all other threads in the
62 // process, check their thread context to see if their eip is currently
63 // inside the block of instructions we need to copy to the stub, and if so
64 // wait a bit and try again, then unfreeze all threads once we've patched.
65 // Not implementing this for now since we're only using SideStep for unit
66 // testing, but if we ever use it for production code this is what we
67 // should do.
69 // NOTE: Stoyan suggests we can write 8 or even 10 bytes atomically using
70 // FPU instructions, and on newer processors we could use cmpxchg8b or
71 // cmpxchg16b. So it might be possible to do the patching/unpatching
72 // atomically and avoid having to freeze other threads. Note though, that
73 // doing it atomically does not help if one of the other threads happens
74 // to have its eip in the middle of the bytes you change while you change
75 // them.
76 unsigned char* target = reinterpret_cast<unsigned char*>(target_function);
78 // First, deal with a special case that we see with functions that
79 // point into an IAT table (including functions linked statically
80 // into the application): these function already starts with
81 // ASM_JMP32REL. For instance, malloc() might be implemented as a
82 // JMP to __malloc(). In that case, we replace the destination of
83 // the JMP (__malloc), rather than the JMP itself (malloc). This
84 // way we get the correct behavior no matter how malloc gets called.
85 if (target[0] == ASM_JMP32REL) {
86 // target[1-4] holds the place the jmp goes to, but it's
87 // relative to the next instruction.
88 int relative_offset; // Windows guarantees int is 4 bytes
89 ASSERT1(sizeof(relative_offset) == 4);
90 memcpy(reinterpret_cast<void*>(&relative_offset),
91 reinterpret_cast<void*>(target + 1), 4);
92 // I'd like to just say "target = target + 5 + relative_offset" here, but
93 // I can't, because the new target will need to have its protections set.
94 return RawPatchWithStubAndProtections(target + 5 + relative_offset,
95 replacement_function, preamble_stub,
96 stub_size, bytes_needed);
99 // Let's disassemble the preamble of the target function to see if we can
100 // patch, and to see how much of the preamble we need to take. We need 5
101 // bytes for our jmp instruction, so let's find the minimum number of
102 // instructions to get 5 bytes.
103 MiniDisassembler disassembler;
104 unsigned int preamble_bytes = 0;
105 while (preamble_bytes < 5) {
106 InstructionType instruction_type =
107 disassembler.Disassemble(target + preamble_bytes, preamble_bytes);
108 if (IT_JUMP == instruction_type) {
109 ASSERT(false, "Unable to patch because there is a jump instruction "
110 "in the first 5 bytes.");
111 return SIDESTEP_JUMP_INSTRUCTION;
112 } else if (IT_RETURN == instruction_type) {
113 ASSERT(false, "Unable to patch because function is too short");
114 return SIDESTEP_FUNCTION_TOO_SMALL;
115 } else if (IT_GENERIC != instruction_type) {
116 ASSERT(false, "Disassembler encountered unsupported instruction "
117 "(either unused or unknown)");
118 return SIDESTEP_UNSUPPORTED_INSTRUCTION;
122 if (NULL != bytes_needed)
123 *bytes_needed = preamble_bytes + 5;
125 // Inv: cbPreamble is the number of bytes (at least 5) that we need to take
126 // from the preamble to have whole instructions that are 5 bytes or more
127 // in size total. The size of the stub required is cbPreamble + size of
128 // jmp (5)
129 if (preamble_bytes + 5 > stub_size) {
130 ASSERT1(false);
131 return SIDESTEP_INSUFFICIENT_BUFFER;
134 // First, copy the preamble that we will overwrite.
135 memcpy(reinterpret_cast<void*>(preamble_stub),
136 reinterpret_cast<void*>(target), preamble_bytes);
138 // Now, make a jmp instruction to the rest of the target function (minus the
139 // preamble bytes we moved into the stub) and copy it into our preamble-stub.
140 // find address to jump to, relative to next address after jmp instruction
141 #ifdef _MSC_VER
142 #pragma warning(push)
143 #pragma warning(disable:4244)
144 #endif
145 int relative_offset_to_target_rest
146 = ((reinterpret_cast<unsigned char*>(target) + preamble_bytes) -
147 (preamble_stub + preamble_bytes + 5));
148 #ifdef _MSC_VER
149 #pragma warning(pop)
150 #endif
151 // jmp (Jump near, relative, displacement relative to next instruction)
152 preamble_stub[preamble_bytes] = ASM_JMP32REL;
153 // copy the address
154 memcpy(reinterpret_cast<void*>(preamble_stub + preamble_bytes + 1),
155 reinterpret_cast<void*>(&relative_offset_to_target_rest), 4);
157 // Inv: preamble_stub points to assembly code that will execute the
158 // original function by first executing the first cbPreamble bytes of the
159 // preamble, then jumping to the rest of the function.
161 // Overwrite the first 5 bytes of the target function with a jump to our
162 // replacement function.
163 // (Jump near, relative, displacement relative to next instruction)
164 target[0] = ASM_JMP32REL;
166 // Find offset from instruction after jmp, to the replacement function.
167 #ifdef _MSC_VER
168 #pragma warning(push)
169 #pragma warning(disable:4244)
170 #endif
171 int offset_to_replacement_function =
172 reinterpret_cast<unsigned char*>(replacement_function) -
173 reinterpret_cast<unsigned char*>(target) - 5;
174 #ifdef _MSC_VER
175 #pragma warning(pop)
176 #endif
177 // complete the jmp instruction
178 memcpy(reinterpret_cast<void*>(target + 1),
179 reinterpret_cast<void*>(&offset_to_replacement_function), 4);
180 // Set any remaining bytes that were moved to the preamble-stub to INT3 so
181 // as not to cause confusion (otherwise you might see some strange
182 // instructions if you look at the disassembly, or even invalid
183 // instructions). Also, by doing this, we will break into the debugger if
184 // some code calls into this portion of the code. If this happens, it
185 // means that this function cannot be patched using this patcher without
186 // further thought.
187 if (preamble_bytes > 5) {
188 memset(reinterpret_cast<void*>(target + 5), ASM_INT3, preamble_bytes - 5);
191 // Inv: The memory pointed to by target_function now points to a relative
192 // jump instruction that jumps over to the preamble_stub. The preamble
193 // stub contains the first stub_size bytes of the original target
194 // function's preamble code, followed by a relative jump back to the next
195 // instruction after the first cbPreamble bytes.
197 return SIDESTEP_SUCCESS;
200 }; // namespace sidestep