//===--- Stack.cpp - Utilities for dealing with stack space ---------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file
/// Defines utilities for dealing with stack allocation and stack space.
///
//===----------------------------------------------------------------------===//

#include "clang/Basic/Stack.h"
#include "llvm/ADT/Optional.h"
#include "llvm/Support/CrashRecoveryContext.h"
#include <cstdlib> // for std::abs

#ifdef _MSC_VER
#include <intrin.h> // for _AddressOfReturnAddress
#endif

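// Address recorded by noteBottomOfStack() for the current thread; treated as
// the "bottom" (starting point) of the stack when estimating stack usage.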
static LLVM_THREAD_LOCAL void *BottomOfStack = nullptr;

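// Returns an address within the current stack frame, used as an approximation
// of the current stack pointer.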
static void *getStackPointer() {
#if __GNUC__ || __has_builtin(__builtin_frame_address)
  return __builtin_frame_address(0);
#elif defined(_MSC_VER)
  return _AddressOfReturnAddress();
#else
  char CharOnStack = 0;
  // The volatile store here is intended to escape the local variable, to
  // prevent the compiler from optimizing CharOnStack into anything other
  // than a char on the stack.
  //
  // Tested on: MSVC 2015 - 2019, GCC 4.9 - 9, Clang 3.2 - 9, ICC 13 - 19.
  char *volatile Ptr = &CharOnStack;
  return Ptr;
#endif
}

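// Record the current stack position as the bottom of the stack for this
// thread; only the first call on a given thread has an effect.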
void clang::noteBottomOfStack() {
  if (!BottomOfStack)
    BottomOfStack = getStackPointer();
}

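// Returns true if the stack consumed since noteBottomOfStack() leaves less
// than SufficientStack (256 KiB) of the DesiredStackSize budget, i.e. it is
// probably unsafe to keep recursing without switching to a fresh stack.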
bool clang::isStackNearlyExhausted() {
  // We consider 256 KiB to be sufficient for any code that runs between checks
  // for stack size.
  constexpr size_t SufficientStack = 256 << 10;

  // If we don't know where the bottom of the stack is, hope for the best.
  if (!BottomOfStack)
    return false;

  intptr_t StackDiff = (intptr_t)getStackPointer() - (intptr_t)BottomOfStack;
  size_t StackUsage = (size_t)std::abs(StackDiff);

  // If the stack pointer has a surprising value, we do not understand this
  // stack usage scheme. (Perhaps the target allocates new stack regions on
  // demand for us.) Don't try to guess what's going on.
  if (StackUsage > DesiredStackSize)
    return false;

  return StackUsage >= DesiredStackSize - SufficientStack;
}

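// Slow path: run Diag and then Fn on a separate thread created with
// DesiredStackSize bytes of stack, using CrashRecoveryContext to manage that
// thread. Callers normally reach this through runWithSufficientStackSpace()
// (declared in clang/Basic/Stack.h) once isStackNearlyExhausted() returns
// true.
//
// Illustrative sketch of a caller (the lambdas here are hypothetical, not
// code from this file):
//
//   if (clang::isStackNearlyExhausted())
//     clang::runWithSufficientStackSpaceSlow(
//         [] { /* e.g. warn that stack space is running low */ },
//         [] { /* continue the deeply recursive work on the new stack */ });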
void clang::runWithSufficientStackSpaceSlow(llvm::function_ref<void()> Diag,
                                            llvm::function_ref<void()> Fn) {
  llvm::CrashRecoveryContext CRC;
  CRC.RunSafelyOnThread([&] {
    noteBottomOfStack();
    Diag();
    Fn();
  }, DesiredStackSize);
}