Where Online Learning is simpler!
The C and C++ Include Header Files
/usr/include/c++/13/stacktrace
$ cat -n /usr/include/c++/13/stacktrace 1 //
-*- C++ -*- 2 3 // Copyright The GNU Toolchain Authors. 4 // 5 // This file is part of the GNU ISO C++ Library. This library is free 6 // software; you can redistribute it and/or modify it under the 7 // terms of the GNU General Public License as published by the 8 // Free Software Foundation; either version 3. 9 10 // This library is distributed in the hope that it will be useful, 11 // but WITHOUT ANY WARRANTY; without even the implied warranty of 12 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 // GNU General Public License for more details. 14 15 // Under Section 7 of GPL version 3, you are granted additional 16 // permissions described in the GCC Runtime Library Exception, version 17 // 3.1, as published by the Free Software Foundation. 18 19 // You should have received a copy of the GNU General Public License and 20 // a copy of the GCC Runtime Library Exception along with this program; 21 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see 22 //
. 23 24 #ifndef _GLIBCXX_STACKTRACE 25 #define _GLIBCXX_STACKTRACE 1 26 27 #pragma GCC system_header 28 29 #include
// std::string bound 30 31 #include
32 33 #if __cplusplus > 202002L && _GLIBCXX_HAVE_STACKTRACE 34 #include
35 #include
36 #include
37 #include
38 #include
39 #include
40 #include
41 #include
42 #include
43 #include
44 45 struct __glibcxx_backtrace_state; 46 struct __glibcxx_backtrace_simple_data; 47 48 extern "C" 49 { 50 __glibcxx_backtrace_state* 51 __glibcxx_backtrace_create_state(const char*, int, 52 void(*)(void*, const char*, int), 53 void*); 54 55 int 56 __glibcxx_backtrace_simple(__glibcxx_backtrace_state*, int, 57 int (*) (void*, __UINTPTR_TYPE__), 58 void(*)(void*, const char*, int), 59 void*); 60 int 61 __glibcxx_backtrace_pcinfo(__glibcxx_backtrace_state*, __UINTPTR_TYPE__, 62 int (*)(void*, __UINTPTR_TYPE__, 63 const char*, int, const char*), 64 void(*)(void*, const char*, int), 65 void*); 66 67 int 68 __glibcxx_backtrace_syminfo(__glibcxx_backtrace_state*, __UINTPTR_TYPE__ addr, 69 void (*) (void*, __UINTPTR_TYPE__, const char*, 70 __UINTPTR_TYPE__, __UINTPTR_TYPE__), 71 void(*)(void*, const char*, int), 72 void*); 73 } 74 75 namespace __cxxabiv1 76 { 77 extern "C" char* 78 __cxa_demangle(const char* __mangled_name, char* __output_buffer, 79 size_t* __length, int* __status); 80 } 81 82 namespace std _GLIBCXX_VISIBILITY(default) 83 { 84 _GLIBCXX_BEGIN_NAMESPACE_VERSION 85 86 #define __cpp_lib_stacktrace 202011L 87 88 // [stacktrace.entry], class stacktrace_entry 89 class stacktrace_entry 90 { 91 using uint_least32_t = __UINT_LEAST32_TYPE__; 92 using uintptr_t = __UINTPTR_TYPE__; 93 94 public: 95 using native_handle_type = uintptr_t; 96 97 // [stacktrace.entry.ctor], constructors 98 99 constexpr 100 stacktrace_entry() noexcept = default; 101 102 constexpr 103 stacktrace_entry(const stacktrace_entry& __other) noexcept = default; 104 105 constexpr stacktrace_entry& 106 operator=(const stacktrace_entry& __other) noexcept = default; 107 108 ~stacktrace_entry() = default; 109 110 // [stacktrace.entry.obs], observers 111 112 constexpr native_handle_type 113 native_handle() const noexcept { return _M_pc; } 114 115 constexpr explicit operator bool() const noexcept { return _M_pc != -1; } 116 117 // [stacktrace.entry.query], query 118 string 119 description() const 120 { 
121 string __s; 122 _M_get_info(&__s, nullptr, nullptr); 123 return __s; 124 } 125 126 string 127 source_file() const 128 { 129 string __s; 130 _M_get_info(nullptr, &__s, nullptr); 131 return __s; 132 } 133 134 uint_least32_t 135 source_line() const 136 { 137 int __line = 0; 138 _M_get_info(nullptr, nullptr, &__line); 139 return __line; 140 } 141 142 // [stacktrace.entry.cmp], comparison 143 friend constexpr bool 144 operator==(const stacktrace_entry& __x, 145 const stacktrace_entry& __y) noexcept 146 { return __x._M_pc == __y._M_pc; } 147 148 friend constexpr strong_ordering 149 operator<=>(const stacktrace_entry& __x, 150 const stacktrace_entry& __y) noexcept 151 { return __x._M_pc <=> __y._M_pc; } 152 153 private: 154 native_handle_type _M_pc = -1; 155 156 template
friend class basic_stacktrace; 157 158 static void _S_err_handler(void*, const char*, int) { } 159 160 static __glibcxx_backtrace_state* 161 _S_init() 162 { 163 static __glibcxx_backtrace_state* __state 164 = __glibcxx_backtrace_create_state(nullptr, 1, _S_err_handler, nullptr); 165 return __state; 166 } 167 168 friend ostream& 169 operator<<(ostream&, const stacktrace_entry&); 170 171 bool 172 _M_get_info(string* __desc, string* __file, int* __line) const 173 { 174 if (!*this) 175 return false; 176 177 struct _Data 178 { 179 string* _M_desc; 180 string* _M_file; 181 int* _M_line; 182 } __data = { __desc, __file, __line }; 183 184 auto __cb = [](void* __data, uintptr_t, const char* __filename, 185 int __lineno, const char* __function) -> int { 186 auto& __d = *static_cast<_Data*>(__data); 187 if (__function && __d._M_desc) 188 *__d._M_desc = _S_demangle(__function); 189 if (__filename && __d._M_file) 190 *__d._M_file = __filename; 191 if (__d._M_line) 192 *__d._M_line = __lineno; 193 return __function != nullptr; 194 }; 195 const auto __state = _S_init(); 196 if (::__glibcxx_backtrace_pcinfo(__state, _M_pc, +__cb, _S_err_handler, 197 &__data)) 198 return true; 199 if (__desc && __desc->empty()) 200 { 201 auto __cb2 = [](void* __data, uintptr_t, const char* __symname, 202 uintptr_t, uintptr_t) { 203 if (__symname) 204 *static_cast<_Data*>(__data)->_M_desc = _S_demangle(__symname); 205 }; 206 if (::__glibcxx_backtrace_syminfo(__state, _M_pc, +__cb2, 207 _S_err_handler, &__data)) 208 return true; 209 } 210 return false; 211 } 212 213 static string 214 _S_demangle(const char* __name) 215 { 216 string __s; 217 int __status; 218 char* __str = __cxxabiv1::__cxa_demangle(__name, nullptr, nullptr, 219 &__status); 220 if (__status == 0) 221 __s = __str; 222 else 223 __s = __name; 224 __builtin_free(__str); 225 return __s; 226 } 227 }; 228 229 // [stacktrace.basic], class template basic_stacktrace 230 template
231 class basic_stacktrace 232 { 233 using _AllocTraits = allocator_traits<_Allocator>; 234 using uintptr_t = __UINTPTR_TYPE__; 235 236 public: 237 using value_type = stacktrace_entry; 238 using const_reference = const value_type&; 239 using reference = value_type&; 240 using const_iterator 241 = __gnu_cxx::__normal_iterator
; 242 using iterator = const_iterator; 243 using reverse_iterator = std::reverse_iterator
; 244 using const_reverse_iterator = std::reverse_iterator
; 245 using difference_type = ptrdiff_t; 246 using size_type = unsigned short; 247 using allocator_type = _Allocator; 248 249 // [stacktrace.basic.ctor], creation and assignment 250 251 [[__gnu__::__noinline__]] 252 static basic_stacktrace 253 current(const allocator_type& __alloc = allocator_type()) noexcept 254 { 255 basic_stacktrace __ret(__alloc); 256 if (auto __cb = __ret._M_prepare()) [[likely]] 257 { 258 auto __state = stacktrace_entry::_S_init(); 259 if (__glibcxx_backtrace_simple(__state, 1, __cb, 260 stacktrace_entry::_S_err_handler, 261 std::__addressof(__ret))) 262 __ret._M_clear(); 263 } 264 return __ret; 265 } 266 267 [[__gnu__::__noinline__]] 268 static basic_stacktrace 269 current(size_type __skip, 270 const allocator_type& __alloc = allocator_type()) noexcept 271 { 272 basic_stacktrace __ret(__alloc); 273 if (__skip >= __INT_MAX__) [[unlikely]] 274 return __ret; 275 if (auto __cb = __ret._M_prepare()) [[likely]] 276 { 277 auto __state = stacktrace_entry::_S_init(); 278 if (__glibcxx_backtrace_simple(__state, __skip + 1, __cb, 279 stacktrace_entry::_S_err_handler, 280 std::__addressof(__ret))) 281 __ret._M_clear(); 282 } 283 284 return __ret; 285 } 286 287 [[__gnu__::__noinline__]] 288 static basic_stacktrace 289 current(size_type __skip, size_type __max_depth, 290 const allocator_type& __alloc = allocator_type()) noexcept 291 { 292 __glibcxx_assert(__skip <= (size_type(-1) - __max_depth)); 293 294 basic_stacktrace __ret(__alloc); 295 if (__max_depth == 0) [[unlikely]] 296 return __ret; 297 if (__skip >= __INT_MAX__) [[unlikely]] 298 return __ret; 299 if (auto __cb = __ret._M_prepare(__max_depth)) [[likely]] 300 { 301 auto __state = stacktrace_entry::_S_init(); 302 int __err = __glibcxx_backtrace_simple(__state, __skip + 1, __cb, 303 stacktrace_entry::_S_err_handler, 304 std::__addressof(__ret)); 305 if (__err < 0) 306 __ret._M_clear(); 307 else if (__ret.size() > __max_depth) 308 { 309 __ret._M_impl._M_resize(__max_depth, __ret._M_alloc); 310 311 
if (__ret._M_impl._M_capacity / 2 >= __max_depth) 312 { 313 // shrink to fit 314 _Impl __tmp = __ret._M_impl._M_clone(__ret._M_alloc); 315 if (__tmp._M_capacity) 316 { 317 __ret._M_clear(); 318 __ret._M_impl = __tmp; 319 } 320 } 321 } 322 } 323 return __ret; 324 } 325 326 basic_stacktrace() 327 noexcept(is_nothrow_default_constructible_v
) 328 { } 329 330 explicit 331 basic_stacktrace(const allocator_type& __alloc) noexcept 332 : _M_alloc(__alloc) 333 { } 334 335 basic_stacktrace(const basic_stacktrace& __other) noexcept 336 : basic_stacktrace(__other, 337 _AllocTraits::select_on_container_copy_construction(__other._M_alloc)) 338 { } 339 340 basic_stacktrace(basic_stacktrace&& __other) noexcept 341 : _M_alloc(std::move(__other._M_alloc)), 342 _M_impl(std::__exchange(__other._M_impl, {})) 343 { } 344 345 basic_stacktrace(const basic_stacktrace& __other, 346 const allocator_type& __alloc) noexcept 347 : _M_alloc(__alloc) 348 { 349 if (const auto __s = __other._M_impl._M_size) 350 _M_impl = __other._M_impl._M_clone(_M_alloc); 351 } 352 353 basic_stacktrace(basic_stacktrace&& __other, 354 const allocator_type& __alloc) noexcept 355 : _M_alloc(__alloc) 356 { 357 if constexpr (_Allocator::is_always_equal::value) 358 _M_impl = std::__exchange(__other._M_impl, {}); 359 else if (_M_alloc == __other._M_alloc) 360 _M_impl = std::__exchange(__other._M_impl, {}); 361 else if (const auto __s = __other._M_impl._M_size) 362 _M_impl = __other._M_impl._M_clone(_M_alloc); 363 } 364 365 basic_stacktrace& 366 operator=(const basic_stacktrace& __other) noexcept 367 { 368 if (std::__addressof(__other) == this) 369 return *this; 370 371 constexpr bool __pocca 372 = _AllocTraits::propagate_on_container_copy_assignment::value; 373 constexpr bool __always_eq = _AllocTraits::is_always_equal::value; 374 375 const auto __s = __other.size(); 376 377 if constexpr (!__always_eq && __pocca) 378 { 379 if (_M_alloc != __other._M_alloc) 380 { 381 // Cannot keep the same storage, so deallocate it now. 382 _M_clear(); 383 } 384 } 385 386 if (_M_impl._M_capacity < __s) 387 { 388 // Need to allocate new storage. 389 _M_clear(); 390 391 if constexpr (__pocca) 392 _M_alloc = __other._M_alloc; 393 394 _M_impl = __other._M_impl._M_clone(_M_alloc); 395 } 396 else 397 { 398 // Current storage is large enough. 
399 _M_impl._M_resize(0, _M_alloc); 400 _M_impl._M_assign(__other._M_impl, _M_alloc); 401 402 if constexpr (__pocca) 403 _M_alloc = __other._M_alloc; 404 } 405 406 return *this; 407 } 408 409 basic_stacktrace& 410 operator=(basic_stacktrace&& __other) noexcept 411 { 412 if (std::__addressof(__other) == this) 413 return *this; 414 415 constexpr bool __pocma 416 = _AllocTraits::propagate_on_container_move_assignment::value; 417 418 if constexpr (_AllocTraits::is_always_equal::value) 419 std::swap(_M_impl, __other._M_impl); 420 else if (_M_alloc == __other._M_alloc) 421 std::swap(_M_impl, __other._M_impl); 422 else if constexpr (__pocma) 423 { 424 // Free current storage and take ownership of __other's storage. 425 _M_clear(); 426 _M_impl = std::__exchange(__other._M_impl, {}); 427 } 428 else // Allocators are unequal and don't propagate. 429 { 430 const size_type __s = __other.size(); 431 432 if (_M_impl._M_capacity < __s) 433 { 434 // Need to allocate new storage. 435 _M_clear(); 436 _M_impl = __other._M_impl._M_clone(_M_alloc); 437 } 438 else 439 { 440 // Current storage is large enough. 
441 _M_impl._M_resize(0, _M_alloc); 442 _M_impl._M_assign(__other._M_impl, _M_alloc); 443 } 444 } 445 446 if constexpr (__pocma) 447 _M_alloc = std::move(__other._M_alloc); 448 449 return *this; 450 } 451 452 constexpr ~basic_stacktrace() 453 { 454 _M_clear(); 455 } 456 457 // [stacktrace.basic.obs], observers 458 allocator_type get_allocator() const noexcept { return _M_alloc; } 459 460 const_iterator 461 begin() const noexcept 462 { return const_iterator{_M_impl._M_frames}; } 463 464 const_iterator 465 end() const noexcept 466 { return begin() + size(); } 467 468 const_reverse_iterator 469 rbegin() const noexcept 470 { return std::make_reverse_iterator(end()); } 471 472 const_reverse_iterator 473 rend() const noexcept 474 { return std::make_reverse_iterator(begin()); } 475 476 const_iterator cbegin() const noexcept { return begin(); } 477 const_iterator cend() const noexcept { return end(); } 478 const_reverse_iterator crbegin() const noexcept { return rbegin(); }; 479 const_reverse_iterator crend() const noexcept { return rend(); }; 480 481 [[nodiscard]] bool empty() const noexcept { return size() == 0; } 482 size_type size() const noexcept { return _M_impl._M_size; } 483 484 size_type 485 max_size() const noexcept 486 { return _Impl::_S_max_size(_M_alloc); } 487 488 const_reference 489 operator[](size_type __n) const noexcept 490 { 491 __glibcxx_assert(__n < size()); 492 return begin()[__n]; 493 } 494 495 const_reference 496 at(size_type __n) const 497 { 498 if (__n >= size()) 499 __throw_out_of_range("basic_stacktrace::at: bad frame number"); 500 return begin()[__n]; 501 } 502 503 // [stacktrace.basic.cmp], comparisons 504 template
505 friend bool 506 operator==(const basic_stacktrace& __x, 507 const basic_stacktrace<_Allocator2>& __y) noexcept 508 { return std::equal(__x.begin(), __x.end(), __y.begin(), __y.end()); } 509 510 template
511 friend strong_ordering 512 operator<=>(const basic_stacktrace& __x, 513 const basic_stacktrace<_Allocator2>& __y) noexcept 514 { 515 if (auto __s = __x.size() <=> __y.size(); __s != 0) 516 return __s; 517 return std::lexicographical_compare_three_way(__x.begin(), __x.end(), 518 __y.begin(), __y.end()); 519 } 520 521 // [stacktrace.basic.mod], modifiers 522 void 523 swap(basic_stacktrace& __other) noexcept 524 { 525 std::swap(_M_impl, __other._M_impl); 526 if constexpr (_AllocTraits::propagate_on_container_swap::value) 527 std::swap(_M_alloc, __other._M_alloc); 528 else if constexpr (!_AllocTraits::is_always_equal::value) 529 { 530 __glibcxx_assert(_M_alloc == __other._M_alloc); 531 } 532 } 533 534 private: 535 bool 536 _M_push_back(const value_type& __x) noexcept 537 { 538 return _M_impl._M_push_back(_M_alloc, __x); 539 } 540 541 void 542 _M_clear() noexcept 543 { 544 _M_impl._M_resize(0, _M_alloc); 545 _M_impl._M_deallocate(_M_alloc); 546 } 547 548 // Precondition: __max_depth != 0 549 auto 550 _M_prepare(size_type __max_depth = -1) noexcept 551 -> int (*) (void*, uintptr_t) 552 { 553 auto __cb = +[](void* __data, uintptr_t __pc) { 554 auto& __s = *static_cast
(__data); 555 stacktrace_entry __f; 556 __f._M_pc = __pc; 557 if (__s._M_push_back(__f)) [[likely]] 558 return 0; // continue tracing 559 return -1; // stop tracing due to error 560 }; 561 562 if (__max_depth > 128) 563 __max_depth = 64; // soft limit, _M_push_back will reallocate 564 else 565 __cb = [](void* __data, uintptr_t __pc) { 566 auto& __s = *static_cast
(__data); 567 stacktrace_entry __f; 568 __f._M_pc = __pc; 569 if (__s.size() == __s._M_impl._M_capacity) [[unlikely]] 570 return 1; // stop tracing due to reaching max depth 571 if (__s._M_push_back(__f)) [[likely]] 572 return 0; // continue tracing 573 return -1; // stop tracing due to error 574 }; 575 576 if (_M_impl._M_allocate(_M_alloc, __max_depth)) [[likely]] 577 return __cb; 578 return nullptr; 579 } 580 581 struct _Impl 582 { 583 using pointer = typename _AllocTraits::pointer; 584 585 pointer _M_frames = nullptr; 586 size_type _M_size = 0; 587 size_type _M_capacity = 0; 588 589 static size_type 590 _S_max_size(const allocator_type& __alloc) noexcept 591 { 592 const size_t __size_max = __gnu_cxx::__int_traits
::__max; 593 const size_t __alloc_max = _AllocTraits::max_size(__alloc); 594 return std::min(__size_max, __alloc_max); 595 } 596 597 #if __has_builtin(__builtin_operator_new) >= 201802L 598 # define _GLIBCXX_OPERATOR_NEW __builtin_operator_new 599 # define _GLIBCXX_OPERATOR_DELETE __builtin_operator_delete 600 #else 601 # define _GLIBCXX_OPERATOR_NEW ::operator new 602 # define _GLIBCXX_OPERATOR_DELETE ::operator delete 603 #endif 604 605 #if __cpp_sized_deallocation 606 # define _GLIBCXX_SIZED_DELETE(T, p, n) \ 607 _GLIBCXX_OPERATOR_DELETE((p), (n) * sizeof(T)) 608 #else 609 # define _GLIBCXX_SIZED_DELETE(T, p, n) _GLIBCXX_OPERATOR_DELETE(p) 610 #endif 611 612 // Precondition: _M_frames == nullptr && __n != 0 613 pointer 614 _M_allocate(allocator_type& __alloc, size_type __n) noexcept 615 { 616 if (__n <= _S_max_size(__alloc)) [[likely]] 617 { 618 if constexpr (is_same_v
>) 619 { 620 // For std::allocator we use nothrow-new directly so we 621 // don't need to handle bad_alloc exceptions. 622 size_t __nb = __n * sizeof(value_type); 623 void* const __p = _GLIBCXX_OPERATOR_NEW (__nb, nothrow_t{}); 624 if (__p == nullptr) [[unlikely]] 625 return nullptr; 626 _M_frames = static_cast
(__p); 627 } 628 else 629 { 630 __try 631 { 632 _M_frames = __alloc.allocate(__n); 633 } 634 __catch (const std::bad_alloc&) 635 { 636 return nullptr; 637 } 638 } 639 _M_capacity = __n; 640 return _M_frames; 641 } 642 return nullptr; 643 } 644 645 void 646 _M_deallocate(allocator_type& __alloc) noexcept 647 { 648 if (_M_capacity) 649 { 650 if constexpr (is_same_v
>) 651 _GLIBCXX_SIZED_DELETE(value_type, 652 static_cast
(_M_frames), 653 _M_capacity); 654 else 655 __alloc.deallocate(_M_frames, _M_capacity); 656 _M_frames = nullptr; 657 _M_capacity = 0; 658 } 659 } 660 661 #undef _GLIBCXX_SIZED_DELETE 662 #undef _GLIBCXX_OPERATOR_DELETE 663 #undef _GLIBCXX_OPERATOR_NEW 664 665 // Precondition: __n <= _M_size 666 void 667 _M_resize(size_type __n, allocator_type& __alloc) noexcept 668 { 669 for (size_type __i = __n; __i < _M_size; ++__i) 670 _AllocTraits::destroy(__alloc, &_M_frames[__i]); 671 _M_size = __n; 672 } 673 674 bool 675 _M_push_back(allocator_type& __alloc, 676 const stacktrace_entry& __f) noexcept 677 { 678 if (_M_size == _M_capacity) [[unlikely]] 679 { 680 _Impl __tmp = _M_xclone(_M_capacity ? _M_capacity : 8, __alloc); 681 if (!__tmp._M_capacity) [[unlikely]] 682 return false; 683 _M_resize(0, __alloc); 684 _M_deallocate(__alloc); 685 *this = __tmp; 686 } 687 stacktrace_entry* __addr = std::to_address(_M_frames + _M_size++); 688 _AllocTraits::construct(__alloc, __addr, __f); 689 return true; 690 } 691 692 // Precondition: _M_size != 0 693 _Impl 694 _M_clone(allocator_type& __alloc) const noexcept 695 { 696 return _M_xclone(_M_size, __alloc); 697 } 698 699 // Precondition: _M_size != 0 || __extra != 0 700 _Impl 701 _M_xclone(size_type __extra, allocator_type& __alloc) const noexcept 702 { 703 _Impl __i; 704 if (__i._M_allocate(__alloc, _M_size + __extra)) [[likely]] 705 __i._M_assign(*this, __alloc); 706 return __i; 707 } 708 709 // Precondition: _M_capacity >= __other._M_size 710 void 711 _M_assign(const _Impl& __other, allocator_type& __alloc) noexcept 712 { 713 std::__uninitialized_copy_a(__other._M_frames, 714 __other._M_frames + __other._M_size, 715 _M_frames, __alloc); 716 _M_size = __other._M_size; 717 } 718 }; 719 720 [[no_unique_address]] allocator_type _M_alloc{}; 721 722 _Impl _M_impl{}; 723 }; 724 725 // basic_stacktrace typedef names 726 using stacktrace = basic_stacktrace
>; 727 728 // [stacktrace.basic.nonmem], non-member functions 729 template
730 inline void 731 swap(basic_stacktrace<_Allocator>& __a, basic_stacktrace<_Allocator>& __b) 732 noexcept(noexcept(__a.swap(__b))) 733 { __a.swap(__b); } 734 735 inline ostream& 736 operator<<(ostream& __os, const stacktrace_entry& __f) 737 { 738 string __desc, __file; 739 int __line; 740 if (__f._M_get_info(&__desc, &__file, &__line)) 741 { 742 __os.width(4); 743 __os << __desc << " at " << __file << ':' << __line; 744 } 745 return __os; 746 } 747 748 template
749 inline ostream& 750 operator<<(ostream& __os, const basic_stacktrace<_Allocator>& __st) 751 { 752 for (stacktrace::size_type __i = 0; __i < __st.size(); ++__i) 753 { 754 __os.width(4); 755 __os << __i << "# " << __st[__i] << '\n'; 756 } 757 return __os; 758 } 759 760 inline string 761 to_string(const stacktrace_entry& __f) 762 { 763 std::ostringstream __os; 764 __os << __f; 765 return std::move(__os).str(); 766 } 767 768 template
769 string 770 to_string(const basic_stacktrace<_Allocator>& __st) 771 { 772 std::ostringstream __os; 773 __os << __st; 774 return std::move(__os).str(); 775 } 776 777 namespace pmr 778 { 779 using stacktrace 780 = basic_stacktrace
>; 781 } 782 783 // [stacktrace.basic.hash], hash support 784 785 template<> 786 struct hash
787 { 788 size_t 789 operator()(const stacktrace_entry& __f) const noexcept 790 { 791 using __h = hash
; 792 return __h()(__f.native_handle()); 793 } 794 }; 795 796 template
797 struct hash
> 798 { 799 size_t 800 operator()(const basic_stacktrace<_Allocator>& __st) const noexcept 801 { 802 hash
__h; 803 size_t __val = _Hash_impl::hash(__st.size()); 804 for (const auto& __f : __st) 805 __val = _Hash_impl::__hash_combine(__h(__f), __val); 806 return __val; 807 } 808 }; 809 810 _GLIBCXX_END_NAMESPACE_VERSION 811 } // namespace std 812 #endif // C++23 813 814 #endif /* _GLIBCXX_STACKTRACE */
Contact us
|
About us
|
Terms of use
|
Copyright © 2000-2025 MyWebUniversity.com ™