//===-- xray_interface.cpp --------------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of XRay, a dynamic runtime instrumentation system.
//
// Implementation of the API functions.
//
//===----------------------------------------------------------------------===//

#include "xray_interface_internal.h"

#include <cstdint>
#include <cstdio>
#include <errno.h>
#include <limits>
#include <string.h>
#include <sys/mman.h>

#if SANITIZER_FUCHSIA
#include <zircon/process.h>
#include <zircon/sanitizer.h>
#include <zircon/status.h>
#include <zircon/syscalls.h>
#endif

#include "sanitizer_common/sanitizer_addrhashmap.h"
#include "sanitizer_common/sanitizer_common.h"

#include "xray_defs.h"
#include "xray_flags.h"

extern __sanitizer::SpinMutex XRayInstrMapMutex;
extern __sanitizer::atomic_uint8_t XRayInitialized;
extern __xray::XRaySledMap XRayInstrMap;

namespace __xray {

#if defined(__x86_64__)
static const int16_t cSledLength = 12;
#elif defined(__aarch64__)
static const int16_t cSledLength = 32;
#elif defined(__arm__)
static const int16_t cSledLength = 28;
#elif SANITIZER_MIPS32
static const int16_t cSledLength = 48;
#elif SANITIZER_MIPS64
static const int16_t cSledLength = 64;
#elif defined(__powerpc64__)
static const int16_t cSledLength = 8;
#else
#error "Unsupported CPU Architecture"
#endif /* CPU architecture */

// This is the function to call when we encounter the entry or exit sleds.
atomic_uintptr_t XRayPatchedFunction{0};

// This is the function to call from the arg1-enabled sleds/trampolines.
atomic_uintptr_t XRayArgLogger{0};

// This is the function to call when we encounter a custom event log call.
atomic_uintptr_t XRayPatchedCustomEvent{0};

// This is the function to call when we encounter a typed event log call.
atomic_uintptr_t XRayPatchedTypedEvent{0};

// This is the global status to determine whether we are currently
// patching/unpatching.
atomic_uint8_t XRayPatching{0};

struct TypeDescription {
  uint32_t type_id;
  std::size_t description_string_length;
};

using TypeDescriptorMapType = AddrHashMap<TypeDescription, 11>;
// An address map from immutable descriptors to type ids.
TypeDescriptorMapType TypeDescriptorAddressMap{};

atomic_uint32_t TypeEventDescriptorCounter{0};

// MProtectHelper is an RAII wrapper for calls to mprotect(...) that will
// undo any successful mprotect(...) changes. This is used to make a page
// writable and executable, and upon destruction, if that succeeded, it
// returns the page to being read-only and executable.
//
// This is only used specifically for runtime-patching of the XRay
// instrumentation points. This assumes that the executable pages are
// originally read-and-execute only.
class MProtectHelper {
  void *PageAlignedAddr;
  std::size_t MProtectLen;
  bool MustCleanup;

public:
  explicit MProtectHelper(void *PageAlignedAddr,
                          std::size_t MProtectLen,
                          std::size_t PageSize) XRAY_NEVER_INSTRUMENT
      : PageAlignedAddr(PageAlignedAddr),
        MProtectLen(MProtectLen),
        MustCleanup(false) {
#if SANITIZER_FUCHSIA
    MProtectLen = RoundUpTo(MProtectLen, PageSize);
#endif
  }

  int MakeWriteable() XRAY_NEVER_INSTRUMENT {
#if SANITIZER_FUCHSIA
    auto R = __sanitizer_change_code_protection(
        reinterpret_cast<uintptr_t>(PageAlignedAddr), MProtectLen, true);
    if (R != ZX_OK) {
      Report("XRay: cannot change code protection: %s\n",
             _zx_status_get_string(R));
      return -1;
    }
    MustCleanup = true;
    return 0;
#else
    auto R = mprotect(PageAlignedAddr, MProtectLen,
                      PROT_READ | PROT_WRITE | PROT_EXEC);
    if (R != -1)
      MustCleanup = true;
    return R;
#endif
  }

  ~MProtectHelper() XRAY_NEVER_INSTRUMENT {
    if (MustCleanup) {
#if SANITIZER_FUCHSIA
      auto R = __sanitizer_change_code_protection(
          reinterpret_cast<uintptr_t>(PageAlignedAddr), MProtectLen, false);
      if (R != ZX_OK) {
        Report("XRay: cannot change code protection: %s\n",
               _zx_status_get_string(R));
      }
#else
      mprotect(PageAlignedAddr, MProtectLen, PROT_READ | PROT_EXEC);
#endif
    }
  }
};
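
// A minimal usage sketch of the helper, mirroring how controlPatching() below
// uses it (illustrative only; error handling abbreviated):
//
//   MProtectHelper Protector(PageAlignedAddr, MProtectLen, PageSize);
//   if (Protector.MakeWriteable() == -1)
//     return XRayPatchingStatus::FAILED;  // errno carries the mprotect error.
//   // ... rewrite the sleds while the pages are writable ...
//   // Read+execute protection is restored when Protector goes out of scope.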

namespace {

bool patchSled(const XRaySledEntry &Sled, bool Enable,
               int32_t FuncId) XRAY_NEVER_INSTRUMENT {
  bool Success = false;
  switch (Sled.Kind) {
  case XRayEntryType::ENTRY:
    Success = patchFunctionEntry(Enable, FuncId, Sled, __xray_FunctionEntry);
    break;
  case XRayEntryType::EXIT:
    Success = patchFunctionExit(Enable, FuncId, Sled);
    break;
  case XRayEntryType::TAIL:
    Success = patchFunctionTailExit(Enable, FuncId, Sled);
    break;
  case XRayEntryType::LOG_ARGS_ENTRY:
    Success = patchFunctionEntry(Enable, FuncId, Sled, __xray_ArgLoggerEntry);
    break;
  case XRayEntryType::CUSTOM_EVENT:
    Success = patchCustomEvent(Enable, FuncId, Sled);
    break;
  case XRayEntryType::TYPED_EVENT:
    Success = patchTypedEvent(Enable, FuncId, Sled);
    break;
  default:
    Report("Unsupported sled kind '%d' @%04zx\n", int(Sled.Kind),
           static_cast<uptr>(Sled.Address));
    return false;
  }
  return Success;
}

XRayPatchingStatus patchFunction(int32_t FuncId,
                                 bool Enable) XRAY_NEVER_INSTRUMENT {
  if (!atomic_load(&XRayInitialized,
                   memory_order_acquire))
    return XRayPatchingStatus::NOT_INITIALIZED; // Not initialized.

  uint8_t NotPatching = false;
  if (!atomic_compare_exchange_strong(
          &XRayPatching, &NotPatching, true, memory_order_acq_rel))
    return XRayPatchingStatus::ONGOING; // Already patching.

  // Next, we look for the function index.
  XRaySledMap InstrMap;
  {
    SpinMutexLock Guard(&XRayInstrMapMutex);
    InstrMap = XRayInstrMap;
  }

  // If we don't have an index, we can't patch individual functions.
  if (InstrMap.Functions == 0)
    return XRayPatchingStatus::NOT_INITIALIZED;

  // FuncId must be a positive number, less than the number of functions
  // instrumented.
  if (FuncId <= 0 || static_cast<size_t>(FuncId) > InstrMap.Functions) {
    Report("Invalid function id provided: %d\n", FuncId);
    return XRayPatchingStatus::FAILED;
  }

  // Now we patch the sleds for this specific function.
  auto SledRange = InstrMap.SledsIndex[FuncId - 1];
  auto *f = SledRange.Begin;
  auto *e = SledRange.End;

  bool SucceedOnce = false;
  while (f != e)
    SucceedOnce |= patchSled(*f++, Enable, FuncId);

  atomic_store(&XRayPatching, false,
               memory_order_release);

  if (!SucceedOnce) {
    Report("Failed patching any sled for function '%d'.\n", FuncId);
    return XRayPatchingStatus::FAILED;
  }

  return XRayPatchingStatus::SUCCESS;
}

// controlPatching implements the common internals of the patching/unpatching
// implementation. |Enable| defines whether we're enabling or disabling the
// runtime XRay instrumentation.
XRayPatchingStatus controlPatching(bool Enable) XRAY_NEVER_INSTRUMENT {
  if (!atomic_load(&XRayInitialized,
                   memory_order_acquire))
    return XRayPatchingStatus::NOT_INITIALIZED; // Not initialized.

  uint8_t NotPatching = false;
  if (!atomic_compare_exchange_strong(
          &XRayPatching, &NotPatching, true, memory_order_acq_rel))
    return XRayPatchingStatus::ONGOING; // Already patching.

  uint8_t PatchingSuccess = false;
  auto XRayPatchingStatusResetter =
      at_scope_exit([&PatchingSuccess] {
        if (!PatchingSuccess)
          atomic_store(&XRayPatching, false,
                       memory_order_release);
      });

  XRaySledMap InstrMap;
  {
    SpinMutexLock Guard(&XRayInstrMapMutex);
    InstrMap = XRayInstrMap;
  }
  if (InstrMap.Entries == 0)
    return XRayPatchingStatus::NOT_INITIALIZED;

  uint32_t FuncId = 1;
  uint64_t CurFun = 0;

  // First we want to find the bounds for which we have instrumentation points,
  // and try to get as few calls to mprotect(...) as possible. We're assuming
  // that all the sleds for the instrumentation map are contiguous as a single
  // set of pages. When we do support dynamic shared object instrumentation,
  // we'll need to do this for each set of page load offsets per DSO loaded. For
  // now we're assuming we can mprotect the whole section of text between the
  // minimum sled address and the maximum sled address (+ the largest sled
  // size).
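  //
  // As an illustration (the addresses below are made up; assume x86_64 where
  // cSledLength == 12 and a 4096-byte page): with MinSled.Address == 0x401230
  // and MaxSled.Address == 0x40ffe0, the computation further down yields
  //
  //   PageAlignedAddr = 0x401230 & ~(4096 - 1)     = 0x401000
  //   MProtectLen     = (0x40ffe0 - 0x401000) + 12 = 0xefec bytes
  //
  // so a single mprotect(...) call covers every sled in the map.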
  auto MinSled = InstrMap.Sleds[0];
  auto MaxSled = InstrMap.Sleds[InstrMap.Entries - 1];
  for (std::size_t I = 0; I < InstrMap.Entries; I++) {
    const auto &Sled = InstrMap.Sleds[I];
    if (Sled.Address < MinSled.Address)
      MinSled = Sled;
    if (Sled.Address > MaxSled.Address)
      MaxSled = Sled;
  }

  const size_t PageSize = flags()->xray_page_size_override > 0
                              ? flags()->xray_page_size_override
                              : GetPageSizeCached();
  if ((PageSize == 0) || ((PageSize & (PageSize - 1)) != 0)) {
    Report("System page size is not a power of two: %zu\n", PageSize);
    return XRayPatchingStatus::FAILED;
  }

  void *PageAlignedAddr =
      reinterpret_cast<void *>(MinSled.Address & ~(PageSize - 1));
  size_t MProtectLen =
      (MaxSled.Address - reinterpret_cast<uptr>(PageAlignedAddr)) + cSledLength;
  MProtectHelper Protector(PageAlignedAddr, MProtectLen, PageSize);
  if (Protector.MakeWriteable() == -1) {
    Report("Failed mprotect: %d\n", errno);
    return XRayPatchingStatus::FAILED;
  }

  for (std::size_t I = 0; I < InstrMap.Entries; ++I) {
    auto &Sled = InstrMap.Sleds[I];
    auto F = Sled.Function;
    if (CurFun == 0)
      CurFun = F;
    if (F != CurFun) {
      ++FuncId;
      CurFun = F;
    }
    patchSled(Sled, Enable, FuncId);
  }
  atomic_store(&XRayPatching, false,
               memory_order_release);
  PatchingSuccess = true;
  return XRayPatchingStatus::SUCCESS;
}

XRayPatchingStatus mprotectAndPatchFunction(int32_t FuncId,
                                            bool Enable) XRAY_NEVER_INSTRUMENT {
  XRaySledMap InstrMap;
  {
    SpinMutexLock Guard(&XRayInstrMapMutex);
    InstrMap = XRayInstrMap;
  }

  // FuncId must be a positive number, less than the number of functions
  // instrumented.
  if (FuncId <= 0 || static_cast<size_t>(FuncId) > InstrMap.Functions) {
    Report("Invalid function id provided: %d\n", FuncId);
    return XRayPatchingStatus::FAILED;
  }

  const size_t PageSize = flags()->xray_page_size_override > 0
                              ? flags()->xray_page_size_override
                              : GetPageSizeCached();
  if ((PageSize == 0) || ((PageSize & (PageSize - 1)) != 0)) {
    Report("Provided page size is not a power of two: %zu\n", PageSize);
    return XRayPatchingStatus::FAILED;
  }

  // Here we compute the minimum sled and maximum sled associated with a
  // particular function ID.
  auto SledRange = InstrMap.SledsIndex[FuncId - 1];
  auto *f = SledRange.Begin;
  auto *e = SledRange.End;
  auto MinSled = *f;
  auto MaxSled = *(SledRange.End - 1);
  while (f != e) {
    if (f->Address < MinSled.Address)
      MinSled = *f;
    if (f->Address > MaxSled.Address)
      MaxSled = *f;
    ++f;
  }

  void *PageAlignedAddr =
      reinterpret_cast<void *>(MinSled.Address & ~(PageSize - 1));
  size_t MProtectLen =
      (MaxSled.Address - reinterpret_cast<uptr>(PageAlignedAddr)) + cSledLength;
  MProtectHelper Protector(PageAlignedAddr, MProtectLen, PageSize);
  if (Protector.MakeWriteable() == -1) {
    Report("Failed mprotect: %d\n", errno);
    return XRayPatchingStatus::FAILED;
  }
  return patchFunction(FuncId, Enable);
}

} // namespace

} // namespace __xray

using namespace __xray;

// The following functions are declared `extern "C" {...}` in the header, hence
// they're defined in the global namespace.
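//
// A rough sketch of how a client typically drives this API (illustrative only;
// MyHandler is a user-provided function, and error handling is elided): install
// a handler first, then patch the sleds so the trampolines start calling it.
//
//   void MyHandler(int32_t FuncId, XRayEntryType Type) { /* record event */ }
//
//   if (__xray_set_handler(MyHandler))
//     __xray_patch();          // returns an XRayPatchingStatus
//   ...
//   __xray_unpatch();
//   __xray_remove_handler();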

int __xray_set_handler(void (*entry)(int32_t,
                                     XRayEntryType)) XRAY_NEVER_INSTRUMENT {
  if (atomic_load(&XRayInitialized,
                  memory_order_acquire)) {

    atomic_store(&__xray::XRayPatchedFunction,
                 reinterpret_cast<uintptr_t>(entry),
                 memory_order_release);
    return 1;
  }
  return 0;
}

int __xray_set_customevent_handler(void (*entry)(void *, size_t))
    XRAY_NEVER_INSTRUMENT {
  if (atomic_load(&XRayInitialized,
                  memory_order_acquire)) {
    atomic_store(&__xray::XRayPatchedCustomEvent,
                 reinterpret_cast<uintptr_t>(entry),
                 memory_order_release);
    return 1;
  }
  return 0;
}

int __xray_set_typedevent_handler(void (*entry)(
    uint16_t, const void *, size_t)) XRAY_NEVER_INSTRUMENT {
  if (atomic_load(&XRayInitialized,
                  memory_order_acquire)) {
    atomic_store(&__xray::XRayPatchedTypedEvent,
                 reinterpret_cast<uintptr_t>(entry),
                 memory_order_release);
    return 1;
  }
  return 0;
}

int __xray_remove_handler() XRAY_NEVER_INSTRUMENT {
  return __xray_set_handler(nullptr);
}

int __xray_remove_customevent_handler() XRAY_NEVER_INSTRUMENT {
  return __xray_set_customevent_handler(nullptr);
}

int __xray_remove_typedevent_handler() XRAY_NEVER_INSTRUMENT {
  return __xray_set_typedevent_handler(nullptr);
}

uint16_t __xray_register_event_type(
    const char *const event_type) XRAY_NEVER_INSTRUMENT {
  TypeDescriptorMapType::Handle h(&TypeDescriptorAddressMap, (uptr)event_type);
  if (h.created()) {
    h->type_id = atomic_fetch_add(
        &TypeEventDescriptorCounter, 1, memory_order_acq_rel);
    h->description_string_length = strnlen(event_type, 1024);
  }
  return h->type_id;
}
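
// Illustrative use of event type registration (sketch only): the returned id
// is what the typed-event handler installed above receives as its first
// argument, so clients register once and tag their events with it, e.g.
//
//   static const uint16_t MyEventType =
//       __xray_register_event_type("my_event");
//   // ... later, emit typed events tagged with MyEventType through the
//   // typed-event instrumentation points.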

XRayPatchingStatus __xray_patch() XRAY_NEVER_INSTRUMENT {
  return controlPatching(true);
}

XRayPatchingStatus __xray_unpatch() XRAY_NEVER_INSTRUMENT {
  return controlPatching(false);
}

XRayPatchingStatus __xray_patch_function(int32_t FuncId) XRAY_NEVER_INSTRUMENT {
  return mprotectAndPatchFunction(FuncId, true);
}

XRayPatchingStatus
__xray_unpatch_function(int32_t FuncId) XRAY_NEVER_INSTRUMENT {
  return mprotectAndPatchFunction(FuncId, false);
}

int __xray_set_handler_arg1(void (*entry)(int32_t, XRayEntryType, uint64_t)) {
  if (!atomic_load(&XRayInitialized,
                   memory_order_acquire))
    return 0;

  // A relaxed write might not be visible even if the current thread gets
  // scheduled on a different CPU/NUMA node. We need to wait for everyone to
  // have this handler installed for consistency of collected data across CPUs.
  atomic_store(&XRayArgLogger, reinterpret_cast<uintptr_t>(entry),
               memory_order_release);
  return 1;
}

int __xray_remove_handler_arg1() { return __xray_set_handler_arg1(nullptr); }

uintptr_t __xray_function_address(int32_t FuncId) XRAY_NEVER_INSTRUMENT {
  SpinMutexLock Guard(&XRayInstrMapMutex);
  if (FuncId <= 0 || static_cast<size_t>(FuncId) > XRayInstrMap.Functions)
    return 0;
  return XRayInstrMap.SledsIndex[FuncId - 1].Begin->Function
// On PPC, function entries are always aligned to 16 bytes. The beginning of a
// sled might be a local entry, which is always +8 based on the global entry.
// Always return the global entry.
#ifdef __PPC__
         & ~0xf
#endif
      ;
}

size_t __xray_max_function_id() XRAY_NEVER_INSTRUMENT {
  SpinMutexLock Guard(&XRayInstrMapMutex);
  return XRayInstrMap.Functions;
}
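
// A sketch of how the two introspection entry points above are typically
// combined (illustrative only): function ids are 1-based, so a caller can
// enumerate every instrumented function and map each id back to a code
// address.
//
//   for (size_t FuncId = 1; FuncId <= __xray_max_function_id(); ++FuncId) {
//     if (uintptr_t Addr = __xray_function_address(FuncId)) {
//       // e.g. symbolize Addr, or decide whether to __xray_patch_function().
//     }
//   }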