preamble_patcher_with_stub.cc

// -*- Mode: C++; c-basic-offset: 2; indent-tabs-mode: nil -*-
/* Copyright (c) 2007, Google Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 *     * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above
 * copyright notice, this list of conditions and the following disclaimer
 * in the documentation and/or other materials provided with the
 * distribution.
 *     * Neither the name of Google Inc. nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * ---
 * Author: Joi Sigurdsson
 * Author: Scott Francis
 *
 * Implementation of PreamblePatcher
 */
#include "preamble_patcher.h"

#include "mini_disassembler.h"

// Definitions of assembly statements we need
#define ASM_JMP32REL 0xE9
#define ASM_INT3 0xCC
#define ASM_NOP 0x90
// X64 opcodes
#define ASM_MOVRAX_IMM 0xB8
#define ASM_REXW 0x48
#define ASM_JMP 0xFF
#define ASM_JMP_RAX 0xE0
#define ASM_PUSH 0x68
#define ASM_RET 0xC3
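// The 5-byte patch written into the target function (and the stub's jump
// back into the target) is encoded as "E9 <rel32>", i.e. jmp rel32. On x64,
// when the replacement function is out of rel32 range, a 13-byte trampoline
// is emitted into the stub instead:
//   90              nop   (signature byte checked when unpatching)
//   48 B8 <imm64>   mov rax, <replacement_function>
//   FF E0           jmp rax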
namespace sidestep {

SideStepError PreamblePatcher::RawPatchWithStub(
    void* target_function,
    void* replacement_function,
    unsigned char* preamble_stub,
    unsigned long stub_size,
    unsigned long* bytes_needed) {
  if ((NULL == target_function) ||
      (NULL == replacement_function) ||
      (NULL == preamble_stub)) {
    SIDESTEP_ASSERT(false &&
                    "Invalid parameters - either pTargetFunction or "
                    "pReplacementFunction or pPreambleStub were NULL.");
    return SIDESTEP_INVALID_PARAMETER;
  }

  // TODO(V7:joi) Siggi and I just had a discussion and decided that both
  // patching and unpatching are actually unsafe. We also discussed a
  // method of making it safe, which is to freeze all other threads in the
  // process, check their thread context to see if their eip is currently
  // inside the block of instructions we need to copy to the stub, and if so
  // wait a bit and try again, then unfreeze all threads once we've patched.
  // Not implementing this for now since we're only using SideStep for unit
  // testing, but if we ever use it for production code this is what we
  // should do.
  //
  // NOTE: Stoyan suggests we can write 8 or even 10 bytes atomically using
  // FPU instructions, and on newer processors we could use cmpxchg8b or
  // cmpxchg16b. So it might be possible to do the patching/unpatching
  // atomically and avoid having to freeze other threads. Note though, that
  // doing it atomically does not help if one of the other threads happens
  // to have its eip in the middle of the bytes you change while you change
  // them.
  unsigned char* target = reinterpret_cast<unsigned char*>(target_function);
  unsigned int required_trampoline_bytes = 0;
  const unsigned int kRequiredStubJumpBytes = 5;
  const unsigned int kRequiredTargetPatchBytes = 5;

  // Initialize the stub with INT3's just in case.
  if (stub_size) {
    memset(preamble_stub, 0xcc, stub_size);
  }

  if (kIs64BitBinary) {
    // In 64-bit mode JMP instructions are always relative to RIP. If the
    // replacement - target offset is > 2GB, we can't JMP to the replacement
    // function. In this case, we're going to use a trampoline - that is,
    // we're going to do a relative jump to a small chunk of code in the stub
    // that will then do the absolute jump to the replacement function. By
    // doing this, we only need to patch 5 bytes in the target function, as
    // opposed to patching 12 bytes if we were to do an absolute jump.
    //
    // Note that the first byte of the trampoline is a NOP instruction. This
    // is used as a trampoline signature that will be detected when unpatching
    // the function.
    //
    // jmp <trampoline>
    //
    // trampoline:
    //   nop
    //   mov rax, <replacement_function>
    //   jmp rax
    //
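    // The rel32 displacement of a 5-byte "jmp rel32" is measured from the
    // end of that instruction, hence the "- 5" in the offset calculations
    // below.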
    __int64 replacement_target_offset = reinterpret_cast<__int64>(
        replacement_function) - reinterpret_cast<__int64>(target) - 5;
    if (replacement_target_offset > INT_MAX
        || replacement_target_offset < INT_MIN) {
      // The stub needs to be within 2GB of the target for the trampoline to
      // work!
      __int64 trampoline_offset = reinterpret_cast<__int64>(preamble_stub)
          - reinterpret_cast<__int64>(target) - 5;
      if (trampoline_offset > INT_MAX || trampoline_offset < INT_MIN) {
        // We're screwed.
        SIDESTEP_ASSERT(false
                        && "Preamble stub is too far from target to patch.");
        return SIDESTEP_UNEXPECTED;
      }
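      // 13 bytes: 1 (nop) + 2 (rex.w prefix + mov-rax opcode) + 8 (imm64
      // address of the replacement) + 2 (jmp rax).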
      required_trampoline_bytes = 13;
    }
  }

  // Let's disassemble the preamble of the target function to see if we can
  // patch, and to see how much of the preamble we need to take. We need 5
  // bytes for our jmp instruction, so let's find the minimum number of
  // instructions to get 5 bytes.
  MiniDisassembler disassembler;
  unsigned int preamble_bytes = 0;
  unsigned int stub_bytes = 0;
  while (preamble_bytes < kRequiredTargetPatchBytes) {
    unsigned int cur_bytes = 0;
    InstructionType instruction_type =
        disassembler.Disassemble(target + preamble_bytes, cur_bytes);
    if (IT_JUMP == instruction_type) {
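      // A relative jump or call can't be copied verbatim: its displacement
      // is measured from its own address, which changes once the bytes live
      // in the stub. The Patch* helpers below emit a relocated (possibly
      // longer) form, which is why jump_bytes is tracked separately from
      // cur_bytes.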
      unsigned int jump_bytes = 0;
      SideStepError jump_ret = SIDESTEP_JUMP_INSTRUCTION;
      if (IsShortConditionalJump(target + preamble_bytes, cur_bytes)) {
        jump_ret = PatchShortConditionalJump(target + preamble_bytes,
                                             cur_bytes,
                                             preamble_stub + stub_bytes,
                                             &jump_bytes,
                                             stub_size - stub_bytes);
      } else if (IsShortJump(target + preamble_bytes, cur_bytes)) {
        jump_ret = PatchShortJump(target + preamble_bytes, cur_bytes,
                                  preamble_stub + stub_bytes,
                                  &jump_bytes,
                                  stub_size - stub_bytes);
      } else if (IsNearConditionalJump(target + preamble_bytes, cur_bytes) ||
                 IsNearRelativeJump(target + preamble_bytes, cur_bytes) ||
                 IsNearAbsoluteCall(target + preamble_bytes, cur_bytes) ||
                 IsNearRelativeCall(target + preamble_bytes, cur_bytes)) {
        jump_ret = PatchNearJumpOrCall(target + preamble_bytes, cur_bytes,
                                       preamble_stub + stub_bytes,
                                       &jump_bytes,
                                       stub_size - stub_bytes);
      }
      if (jump_ret != SIDESTEP_SUCCESS) {
        SIDESTEP_ASSERT(false &&
                        "Unable to patch because there is an unhandled branch "
                        "instruction in the initial preamble bytes.");
        return SIDESTEP_JUMP_INSTRUCTION;
      }
      stub_bytes += jump_bytes;
    } else if (IT_RETURN == instruction_type) {
      SIDESTEP_ASSERT(false &&
                      "Unable to patch because function is too short");
      return SIDESTEP_FUNCTION_TOO_SMALL;
    } else if (IT_GENERIC == instruction_type) {
      if (IsMovWithDisplacement(target + preamble_bytes, cur_bytes)) {
        unsigned int mov_bytes = 0;
        if (PatchMovWithDisplacement(target + preamble_bytes, cur_bytes,
                                     preamble_stub + stub_bytes, &mov_bytes,
                                     stub_size - stub_bytes)
            != SIDESTEP_SUCCESS) {
          return SIDESTEP_UNSUPPORTED_INSTRUCTION;
        }
        stub_bytes += mov_bytes;
      } else {
        memcpy(reinterpret_cast<void*>(preamble_stub + stub_bytes),
               reinterpret_cast<void*>(target + preamble_bytes), cur_bytes);
        stub_bytes += cur_bytes;
      }
    } else {
      SIDESTEP_ASSERT(false &&
                      "Disassembler encountered unsupported instruction "
                      "(either unused or unknown)");
      return SIDESTEP_UNSUPPORTED_INSTRUCTION;
    }
    preamble_bytes += cur_bytes;
  }

  if (NULL != bytes_needed)
    *bytes_needed = stub_bytes + kRequiredStubJumpBytes
        + required_trampoline_bytes;
  // Inv: preamble_bytes is the number of bytes (at least 5) that we need to
  // take from the preamble to have whole instructions that are 5 bytes or
  // more in size total, and stub_bytes is the number of bytes those
  // (possibly relocated) instructions occupy in the stub. The size of the
  // stub required is stub_bytes + kRequiredStubJumpBytes (5) +
  // required_trampoline_bytes (0 or 13).
  if (stub_bytes + kRequiredStubJumpBytes + required_trampoline_bytes
      > stub_size) {
    SIDESTEP_ASSERT(false);
    return SIDESTEP_INSUFFICIENT_BUFFER;
  }
  // Now, make a jmp instruction to the rest of the target function (minus the
  // preamble bytes we moved into the stub) and copy it into our preamble-stub.
  // find address to jump to, relative to next address after jmp instruction
#ifdef _MSC_VER
#pragma warning(push)
#pragma warning(disable:4244)
#endif
  int relative_offset_to_target_rest
      = ((reinterpret_cast<unsigned char*>(target) + preamble_bytes) -
         (preamble_stub + stub_bytes + kRequiredStubJumpBytes));
#ifdef _MSC_VER
#pragma warning(pop)
#endif
  // jmp (Jump near, relative, displacement relative to next instruction)
  preamble_stub[stub_bytes] = ASM_JMP32REL;
  // copy the address
  memcpy(reinterpret_cast<void*>(preamble_stub + stub_bytes + 1),
         reinterpret_cast<void*>(&relative_offset_to_target_rest), 4);

  if (kIs64BitBinary && required_trampoline_bytes != 0) {
    // Construct the trampoline
    unsigned int trampoline_pos = stub_bytes + kRequiredStubJumpBytes;
    preamble_stub[trampoline_pos] = ASM_NOP;
    preamble_stub[trampoline_pos + 1] = ASM_REXW;
    preamble_stub[trampoline_pos + 2] = ASM_MOVRAX_IMM;
    memcpy(reinterpret_cast<void*>(preamble_stub + trampoline_pos + 3),
           reinterpret_cast<void*>(&replacement_function),
           sizeof(void *));
    preamble_stub[trampoline_pos + 11] = ASM_JMP;
    preamble_stub[trampoline_pos + 12] = ASM_JMP_RAX;

    // Now update replacement_function to point to the trampoline
    replacement_function = preamble_stub + trampoline_pos;
  }
  // Inv: preamble_stub points to assembly code that will execute the
  // original function by first executing the first preamble_bytes bytes of
  // the preamble, then jumping to the rest of the function.

  // Overwrite the first 5 bytes of the target function with a jump to our
  // replacement function.
  // (Jump near, relative, displacement relative to next instruction)
  target[0] = ASM_JMP32REL;

  // Find offset from instruction after jmp, to the replacement function.
#ifdef _MSC_VER
#pragma warning(push)
#pragma warning(disable:4244)
#endif
  int offset_to_replacement_function =
      reinterpret_cast<unsigned char*>(replacement_function) -
      reinterpret_cast<unsigned char*>(target) - 5;
#ifdef _MSC_VER
#pragma warning(pop)
#endif
  // complete the jmp instruction
  memcpy(reinterpret_cast<void*>(target + 1),
         reinterpret_cast<void*>(&offset_to_replacement_function), 4);

  // Set any remaining bytes that were moved to the preamble-stub to INT3 so
  // as not to cause confusion (otherwise you might see some strange
  // instructions if you look at the disassembly, or even invalid
  // instructions). Also, by doing this, we will break into the debugger if
  // some code calls into this portion of the code. If this happens, it
  // means that this function cannot be patched using this patcher without
  // further thought.
  if (preamble_bytes > kRequiredTargetPatchBytes) {
    memset(reinterpret_cast<void*>(target + kRequiredTargetPatchBytes),
           ASM_INT3, preamble_bytes - kRequiredTargetPatchBytes);
  }
  // Inv: The memory pointed to by target_function now points to a relative
  // jump instruction that jumps over to the preamble_stub. The preamble
  // stub contains the relocated first preamble_bytes bytes of the original
  // target function's preamble code, followed by a relative jump back to
  // the next instruction after those bytes.
  //
  // In 64-bit mode the memory pointed to by target_function *may* point to a
  // relative jump instruction that jumps to a trampoline which will then
  // perform an absolute jump to the replacement function. The preamble stub
  // still contains the original target function's preamble code, followed by a
  // jump back to the instructions after the first preamble bytes.
  //
  return SIDESTEP_SUCCESS;
}

};  // namespace sidestep