The C and C++ Include Header Files
/usr/include/c++/13/ext/rope
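This header implements the SGI rope class, a GNU extension to the standard library: a string stored as a tree of reference-counted nodes rather than a single flat array, so that concatenation and substring operations can share structure instead of copying every character. A minimal usage sketch, using only members that appear in the listing below (crope is the rope<char> typedef provided by this same header; the file name in the comment is illustrative):

    // Build a rope from C strings, concatenate, and read it back.
    // Compile with a plain g++ invocation; #include <ext/rope> is all
    // that is needed, the implementation lives entirely in headers.
    #include <ext/rope>
    #include <iostream>

    int main()
    {
        __gnu_cxx::crope left("Hello, ");
        __gnu_cxx::crope right("world");

        // operator+ builds a new rope; larger pieces are shared as tree
        // nodes, very short ones (below _S_copy_max) are simply copied.
        __gnu_cxx::crope greeting = left + right;
        greeting.append('!');

        std::cout << greeting.c_str() << '\n';  // Hello, world!
        std::cout << greeting.size()  << '\n';  // 13
        std::cout << greeting[7]      << '\n';  // w
        return 0;
    }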
$ cat -n /usr/include/c++/13/ext/rope
 1  // SGI's rope class -*- C++ -*-
 2
 3  // Copyright (C) 2001-2023 Free Software Foundation, Inc.
 4  //
 5  // This file is part of the GNU ISO C++ Library.  This library is free
 6  // software; you can redistribute it and/or modify it under the
 7  // terms of the GNU General Public License as published by the
 8  // Free Software Foundation; either version 3, or (at your option)
 9  // any later version.
10
11  // This library is distributed in the hope that it will be useful,
12  // but WITHOUT ANY WARRANTY; without even the implied warranty of
13  // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14  // GNU General Public License for more details.
15
16  // Under Section 7 of GPL version 3, you are granted additional
17  // permissions described in the GCC Runtime Library Exception, version
18  // 3.1, as published by the Free Software Foundation.
19
20  // You should have received a copy of the GNU General Public License and
21  // a copy of the GCC Runtime Library Exception along with this program;
22  // see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
23  // <http://www.gnu.org/licenses/>.
24
25  /*
26   * Copyright (c) 1997
27   * Silicon Graphics Computer Systems, Inc.
28   *
29   * Permission to use, copy, modify, distribute and sell this software
30   * and its documentation for any purpose is hereby granted without fee,
31   * provided that the above copyright notice appear in all copies and
32   * that both that copyright notice and this permission notice appear
33   * in supporting documentation.  Silicon Graphics makes no
34   * representations about the suitability of this software for any
35   * purpose.  It is provided "as is" without express or implied warranty.
36   */
37
38  /** @file ext/rope
39   *  This file is a GNU extension to the Standard C++ Library (possibly
40   *  containing extensions from the HP/SGI STL subset).
41   */
42
43  #ifndef _ROPE
44  #define _ROPE 1
45
46  #pragma GCC system_header
47
48  #include            // GNU extensions are currently omitted
49
50  #include
51  #include
52  #include
53  #include
54  #include
55  #include
56  #include
57  #include
58  #include
59  #include
60
61  # ifdef __GC
62  #  define __GC_CONST const
63  # else
64  #  define __GC_CONST   // constant except for deallocation
65  # endif
66
67  #include            // For uninitialized_copy_n
68
69  namespace __gnu_cxx _GLIBCXX_VISIBILITY(default)
70  {
71  _GLIBCXX_BEGIN_NAMESPACE_VERSION
72
73    namespace __detail
74    {
75      enum { _S_max_rope_depth = 45 };
76      enum _Tag {_S_leaf, _S_concat, _S_substringfn, _S_function};
77    } // namespace __detail
78
79    // See libstdc++/36832.
80    template<class _ForwardIterator, class _Allocator>
81 void 82 _Destroy_const(_ForwardIterator __first, 83 _ForwardIterator __last, _Allocator __alloc) 84 { 85 for (; __first != __last; ++__first) 86 __alloc.destroy(&*__first); 87 } 88 89 template
90 inline void 91 _Destroy_const(_ForwardIterator __first, 92 _ForwardIterator __last, std::allocator<_Tp>) 93 { std::_Destroy(__first, __last); } 94 95 // The _S_eos function is used for those functions that 96 // convert to/from C-like strings to detect the end of the string. 97 98 // The end-of-C-string character. 99 // This is what the draft standard says it should be. 100 template
101 inline _CharT 102 _S_eos(_CharT*) 103 { return _CharT(); } 104 105 // Test for basic character types. 106 // For basic character types leaves having a trailing eos. 107 template
108 inline bool 109 _S_is_basic_char_type(_CharT*) 110 { return false; } 111 112 template
113 inline bool 114 _S_is_one_byte_char_type(_CharT*) 115 { return false; } 116 117 inline bool 118 _S_is_basic_char_type(char*) 119 { return true; } 120 121 inline bool 122 _S_is_one_byte_char_type(char*) 123 { return true; } 124 125 inline bool 126 _S_is_basic_char_type(wchar_t*) 127 { return true; } 128 129 // Store an eos iff _CharT is a basic character type. 130 // Do not reference _S_eos if it isn't. 131 template
132 inline void 133 _S_cond_store_eos(_CharT&) { } 134 135 inline void 136 _S_cond_store_eos(char& __c) 137 { __c = 0; } 138 139 inline void 140 _S_cond_store_eos(wchar_t& __c) 141 { __c = 0; } 142 143 // char_producers are logically functions that generate a section of 144 // a string. These can be converted to ropes. The resulting rope 145 // invokes the char_producer on demand. This allows, for example, 146 // files to be viewed as ropes without reading the entire file. 147 template
148 class char_producer 149 { 150 public: 151 virtual ~char_producer() { } 152 153 virtual void 154 operator()(std::size_t __start_pos, std::size_t __len, 155 _CharT* __buffer) = 0; 156 // Buffer should really be an arbitrary output iterator. 157 // That way we could flatten directly into an ostream, etc. 158 // This is thoroughly impossible, since iterator types don't 159 // have runtime descriptions. 160 }; 161 162 // Sequence buffers: 163 // 164 // Sequence must provide an append operation that appends an 165 // array to the sequence. Sequence buffers are useful only if 166 // appending an entire array is cheaper than appending element by element. 167 // This is true for many string representations. 168 // This should perhaps inherit from ostream
169 // and be implemented correspondingly, so that they can be used 170 // for formatted. For the sake of portability, we don't do this yet. 171 // 172 // For now, sequence buffers behave as output iterators. But they also 173 // behave a little like basic_ostringstream
and a 174 // little like containers. 175 176 // Ignore warnings about std::iterator. 177 #pragma GCC diagnostic push 178 #pragma GCC diagnostic ignored "-Wdeprecated-declarations" 179 180 template
181 class sequence_buffer 182 : public std::iterator
183 { 184 public: 185 typedef typename _Sequence::value_type value_type; 186 protected: 187 _Sequence* _M_prefix; 188 value_type _M_buffer[_Buf_sz]; 189 std::size_t _M_buf_count; 190 public: 191 192 void 193 flush() 194 { 195 _M_prefix->append(_M_buffer, _M_buffer + _M_buf_count); 196 _M_buf_count = 0; 197 } 198 199 ~sequence_buffer() 200 { flush(); } 201 202 sequence_buffer() 203 : _M_prefix(0), _M_buf_count(0) { } 204 205 sequence_buffer(const sequence_buffer& __x) 206 { 207 _M_prefix = __x._M_prefix; 208 _M_buf_count = __x._M_buf_count; 209 std::copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer); 210 } 211 212 // Non-const "copy" modifies the parameter - yuck 213 sequence_buffer(sequence_buffer& __x) 214 { 215 __x.flush(); 216 _M_prefix = __x._M_prefix; 217 _M_buf_count = 0; 218 } 219 220 sequence_buffer(_Sequence& __s) 221 : _M_prefix(&__s), _M_buf_count(0) { } 222 223 // Non-const "copy" modifies the parameter - yuck 224 sequence_buffer& 225 operator=(sequence_buffer& __x) 226 { 227 __x.flush(); 228 _M_prefix = __x._M_prefix; 229 _M_buf_count = 0; 230 return *this; 231 } 232 233 sequence_buffer& 234 operator=(const sequence_buffer& __x) 235 { 236 _M_prefix = __x._M_prefix; 237 _M_buf_count = __x._M_buf_count; 238 std::copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer); 239 return *this; 240 } 241 242 #if __cplusplus >= 201103L 243 sequence_buffer(sequence_buffer&& __x) : sequence_buffer(__x) { } 244 sequence_buffer& operator=(sequence_buffer&& __x) { return *this = __x; } 245 #endif 246 247 void 248 push_back(value_type __x) 249 { 250 if (_M_buf_count < _Buf_sz) 251 { 252 _M_buffer[_M_buf_count] = __x; 253 ++_M_buf_count; 254 } 255 else 256 { 257 flush(); 258 _M_buffer[0] = __x; 259 _M_buf_count = 1; 260 } 261 } 262 263 void 264 append(value_type* __s, std::size_t __len) 265 { 266 if (__len + _M_buf_count <= _Buf_sz) 267 { 268 std::size_t __i = _M_buf_count; 269 for (std::size_t __j = 0; __j < __len; __i++, __j++) 270 _M_buffer[__i] = __s[__j]; 271 _M_buf_count += __len; 272 } 273 else if (0 == _M_buf_count) 274 _M_prefix->append(__s, __s + __len); 275 else 276 { 277 flush(); 278 append(__s, __len); 279 } 280 } 281 282 sequence_buffer& 283 write(value_type* __s, std::size_t __len) 284 { 285 append(__s, __len); 286 return *this; 287 } 288 289 sequence_buffer& 290 put(value_type __x) 291 { 292 push_back(__x); 293 return *this; 294 } 295 296 sequence_buffer& 297 operator=(const value_type& __rhs) 298 { 299 push_back(__rhs); 300 return *this; 301 } 302 303 sequence_buffer& 304 operator*() 305 { return *this; } 306 307 sequence_buffer& 308 operator++() 309 { return *this; } 310 311 sequence_buffer 312 operator++(int) 313 { return *this; } 314 }; 315 #pragma GCC diagnostic pop 316 317 // The following should be treated as private, at least for now. 318 template
319 class _Rope_char_consumer 320 { 321 public: 322 // If we had member templates, these should not be virtual. 323 // For now we need to use run-time parametrization where 324 // compile-time would do. Hence this should all be private 325 // for now. 326 // The symmetry with char_producer is accidental and temporary. 327 virtual ~_Rope_char_consumer() { } 328 329 virtual bool 330 operator()(const _CharT* __buffer, std::size_t __len) = 0; 331 }; 332 333 // First a lot of forward declarations. The standard seems to require 334 // much stricter "declaration before use" than many of the implementations 335 // that preceded it. 336 template
> 337 class rope; 338 339 template
340 struct _Rope_RopeConcatenation; 341 342 template
343 struct _Rope_RopeLeaf; 344 345 template
346 struct _Rope_RopeFunction; 347 348 template
349 struct _Rope_RopeSubstring; 350 351 template
352 class _Rope_iterator; 353 354 template
355 class _Rope_const_iterator; 356 357 template
358 class _Rope_char_ref_proxy; 359 360 template
361 class _Rope_char_ptr_proxy; 362 363 template
364 bool 365 operator==(const _Rope_char_ptr_proxy<_CharT, _Alloc>& __x, 366 const _Rope_char_ptr_proxy<_CharT, _Alloc>& __y); 367 368 template
369 _Rope_const_iterator<_CharT, _Alloc> 370 operator-(const _Rope_const_iterator<_CharT, _Alloc>& __x, 371 std::ptrdiff_t __n); 372 373 template
374 _Rope_const_iterator<_CharT, _Alloc> 375 operator+(const _Rope_const_iterator<_CharT, _Alloc>& __x, 376 std::ptrdiff_t __n); 377 378 template
379 _Rope_const_iterator<_CharT, _Alloc> 380 operator+(std::ptrdiff_t __n, 381 const _Rope_const_iterator<_CharT, _Alloc>& __x); 382 383 template
384 bool 385 operator==(const _Rope_const_iterator<_CharT, _Alloc>& __x, 386 const _Rope_const_iterator<_CharT, _Alloc>& __y); 387 388 template
389 bool 390 operator<(const _Rope_const_iterator<_CharT, _Alloc>& __x, 391 const _Rope_const_iterator<_CharT, _Alloc>& __y); 392 393 template
394 std::ptrdiff_t 395 operator-(const _Rope_const_iterator<_CharT, _Alloc>& __x, 396 const _Rope_const_iterator<_CharT, _Alloc>& __y); 397 398 template
399 _Rope_iterator<_CharT, _Alloc> 400 operator-(const _Rope_iterator<_CharT, _Alloc>& __x, std::ptrdiff_t __n); 401 402 template
403 _Rope_iterator<_CharT, _Alloc> 404 operator+(const _Rope_iterator<_CharT, _Alloc>& __x, std::ptrdiff_t __n); 405 406 template
407 _Rope_iterator<_CharT, _Alloc> 408 operator+(std::ptrdiff_t __n, const _Rope_iterator<_CharT, _Alloc>& __x); 409 410 template
411 bool 412 operator==(const _Rope_iterator<_CharT, _Alloc>& __x, 413 const _Rope_iterator<_CharT, _Alloc>& __y); 414 415 template
416 bool 417 operator<(const _Rope_iterator<_CharT, _Alloc>& __x, 418 const _Rope_iterator<_CharT, _Alloc>& __y); 419 420 template
421 std::ptrdiff_t 422 operator-(const _Rope_iterator<_CharT, _Alloc>& __x, 423 const _Rope_iterator<_CharT, _Alloc>& __y); 424 425 template
426 rope<_CharT, _Alloc> 427 operator+(const rope<_CharT, _Alloc>& __left, 428 const rope<_CharT, _Alloc>& __right); 429 430 template
431 rope<_CharT, _Alloc> 432 operator+(const rope<_CharT, _Alloc>& __left, const _CharT* __right); 433 434 template
435 rope<_CharT, _Alloc> 436 operator+(const rope<_CharT, _Alloc>& __left, _CharT __right); 437 438 // Some helpers, so we can use power on ropes. 439 // See below for why this isn't local to the implementation. 440 441 // Ignore warnings about std::binary_function. 442 #pragma GCC diagnostic push 443 #pragma GCC diagnostic ignored "-Wdeprecated-declarations" 444 // This uses a nonstandard refcount convention. 445 // The result has refcount 0. 446 template
447 struct _Rope_Concat_fn 448 : public std::binary_function
, rope<_CharT, _Alloc>, 449 rope<_CharT, _Alloc> > 450 { 451 rope<_CharT, _Alloc> 452 operator()(const rope<_CharT, _Alloc>& __x, 453 const rope<_CharT, _Alloc>& __y) 454 { return __x + __y; } 455 }; 456 #pragma GCC diagnostic pop 457 458 template
459 inline rope<_CharT, _Alloc> 460 identity_element(_Rope_Concat_fn<_CharT, _Alloc>) 461 { return rope<_CharT, _Alloc>(); } 462 463 // Class _Refcount_Base provides a type, _RC_t, a data member, 464 // _M_ref_count, and member functions _M_incr and _M_decr, which perform 465 // atomic preincrement/predecrement. The constructor initializes 466 // _M_ref_count. 467 struct _Refcount_Base 468 { 469 // The type _RC_t 470 typedef std::size_t _RC_t; 471 472 // The data member _M_ref_count 473 _RC_t _M_ref_count; 474 475 // Constructor 476 #ifdef __GTHREAD_MUTEX_INIT 477 __gthread_mutex_t _M_ref_count_lock = __GTHREAD_MUTEX_INIT; 478 #else 479 __gthread_mutex_t _M_ref_count_lock; 480 #endif 481 482 _Refcount_Base(_RC_t __n) : _M_ref_count(__n) 483 { 484 #ifndef __GTHREAD_MUTEX_INIT 485 #ifdef __GTHREAD_MUTEX_INIT_FUNCTION 486 __GTHREAD_MUTEX_INIT_FUNCTION (&_M_ref_count_lock); 487 #else 488 #error __GTHREAD_MUTEX_INIT or __GTHREAD_MUTEX_INIT_FUNCTION should be defined by gthr.h abstraction layer, report problem to libstdc++@gcc.gnu.org. 489 #endif 490 #endif 491 } 492 493 #ifndef __GTHREAD_MUTEX_INIT 494 ~_Refcount_Base() 495 { __gthread_mutex_destroy(&_M_ref_count_lock); } 496 #endif 497 498 void 499 _M_incr() 500 { 501 __gthread_mutex_lock(&_M_ref_count_lock); 502 ++_M_ref_count; 503 __gthread_mutex_unlock(&_M_ref_count_lock); 504 } 505 506 _RC_t 507 _M_decr() 508 { 509 __gthread_mutex_lock(&_M_ref_count_lock); 510 _RC_t __tmp = --_M_ref_count; 511 __gthread_mutex_unlock(&_M_ref_count_lock); 512 return __tmp; 513 } 514 }; 515 516 // 517 // What follows should really be local to rope. Unfortunately, 518 // that doesn't work, since it makes it impossible to define generic 519 // equality on rope iterators. According to the draft standard, the 520 // template parameters for such an equality operator cannot be inferred 521 // from the occurrence of a member class as a parameter. 522 // (SGI compilers in fact allow this, but the __result wouldn't be 523 // portable.) 524 // Similarly, some of the static member functions are member functions 525 // only to avoid polluting the global namespace, and to circumvent 526 // restrictions on type inference for template functions. 527 // 528 529 // 530 // The internal data structure for representing a rope. This is 531 // private to the implementation. A rope is really just a pointer 532 // to one of these. 533 // 534 // A few basic functions for manipulating this data structure 535 // are members of _RopeRep. Most of the more complex algorithms 536 // are implemented as rope members. 537 // 538 // Some of the static member functions of _RopeRep have identically 539 // named functions in rope that simply invoke the _RopeRep versions. 540 541 #define __ROPE_DEFINE_ALLOCS(__a) \ 542 __ROPE_DEFINE_ALLOC(_CharT,_Data) /* character data */ \ 543 typedef _Rope_RopeConcatenation<_CharT,__a> __C; \ 544 __ROPE_DEFINE_ALLOC(__C,_C) \ 545 typedef _Rope_RopeLeaf<_CharT,__a> __L; \ 546 __ROPE_DEFINE_ALLOC(__L,_L) \ 547 typedef _Rope_RopeFunction<_CharT,__a> __F; \ 548 __ROPE_DEFINE_ALLOC(__F,_F) \ 549 typedef _Rope_RopeSubstring<_CharT,__a> __S; \ 550 __ROPE_DEFINE_ALLOC(__S,_S) 551 552 // Internal rope nodes potentially store a copy of the allocator 553 // instance used to allocate them. This is mostly redundant. 554 // But the alternative would be to pass allocator instances around 555 // in some form to nearly all internal functions, since any pointer 556 // assignment may result in a zero reference count and thus require 557 // deallocation. 
558 559 #define __STATIC_IF_SGI_ALLOC /* not static */ 560 561 template
562 struct _Rope_rep_base 563 : public _Alloc 564 { 565 typedef std::size_t size_type; 566 typedef _Alloc allocator_type; 567 568 allocator_type 569 get_allocator() const 570 { return *static_cast
(this); } 571 572 allocator_type& 573 _M_get_allocator() 574 { return *static_cast<_Alloc*>(this); } 575 576 const allocator_type& 577 _M_get_allocator() const 578 { return *static_cast
(this); } 579 580 _Rope_rep_base(size_type __size, const allocator_type&) 581 : _M_size(__size) { } 582 583 size_type _M_size; 584 585 # define __ROPE_DEFINE_ALLOC(_Tp, __name) \ 586 typedef typename \ 587 __alloc_traits<_Alloc>::template rebind<_Tp>::other __name##Alloc; \ 588 static _Tp* __name##_allocate(size_type __n) \ 589 { return __name##Alloc().allocate(__n); } \ 590 static void __name##_deallocate(_Tp *__p, size_type __n) \ 591 { __name##Alloc().deallocate(__p, __n); } 592 __ROPE_DEFINE_ALLOCS(_Alloc) 593 # undef __ROPE_DEFINE_ALLOC 594 }; 595 596 template
597 struct _Rope_RopeRep 598 : public _Rope_rep_base<_CharT, _Alloc> 599 # ifndef __GC 600 , _Refcount_Base 601 # endif 602 { 603 public: 604 __detail::_Tag _M_tag:8; 605 bool _M_is_balanced:8; 606 unsigned char _M_depth; 607 __GC_CONST _CharT* _M_c_string; 608 #ifdef __GTHREAD_MUTEX_INIT 609 __gthread_mutex_t _M_c_string_lock = __GTHREAD_MUTEX_INIT; 610 #else 611 __gthread_mutex_t _M_c_string_lock; 612 #endif 613 /* Flattened version of string, if needed. */ 614 /* typically 0. */ 615 /* If it's not 0, then the memory is owned */ 616 /* by this node. */ 617 /* In the case of a leaf, this may point to */ 618 /* the same memory as the data field. */ 619 typedef typename _Rope_rep_base<_CharT, _Alloc>::allocator_type 620 allocator_type; 621 typedef std::size_t size_type; 622 623 using _Rope_rep_base<_CharT, _Alloc>::get_allocator; 624 using _Rope_rep_base<_CharT, _Alloc>::_M_get_allocator; 625 626 _Rope_RopeRep(__detail::_Tag __t, int __d, bool __b, size_type __size, 627 const allocator_type& __a) 628 : _Rope_rep_base<_CharT, _Alloc>(__size, __a), 629 #ifndef __GC 630 _Refcount_Base(1), 631 #endif 632 _M_tag(__t), _M_is_balanced(__b), _M_depth(__d), _M_c_string(0) 633 #ifdef __GTHREAD_MUTEX_INIT 634 { } 635 #else 636 { __GTHREAD_MUTEX_INIT_FUNCTION (&_M_c_string_lock); } 637 ~_Rope_RopeRep() 638 { __gthread_mutex_destroy (&_M_c_string_lock); } 639 #endif 640 #ifdef __GC 641 void 642 _M_incr () { } 643 #endif 644 static void 645 _S_free_string(__GC_CONST _CharT*, size_type __len, 646 allocator_type& __a); 647 #define __STL_FREE_STRING(__s, __l, __a) _S_free_string(__s, __l, __a); 648 // Deallocate data section of a leaf. 649 // This shouldn't be a member function. 650 // But its hard to do anything else at the 651 // moment, because it's templatized w.r.t. 652 // an allocator. 653 // Does nothing if __GC is defined. 654 #ifndef __GC 655 void _M_free_c_string(); 656 void _M_free_tree(); 657 // Deallocate t. Assumes t is not 0. 658 void 659 _M_unref_nonnil() 660 { 661 if (0 == _M_decr()) 662 _M_free_tree(); 663 } 664 665 void 666 _M_ref_nonnil() 667 { _M_incr(); } 668 669 static void 670 _S_unref(_Rope_RopeRep* __t) 671 { 672 if (0 != __t) 673 __t->_M_unref_nonnil(); 674 } 675 676 static void 677 _S_ref(_Rope_RopeRep* __t) 678 { 679 if (0 != __t) 680 __t->_M_incr(); 681 } 682 683 static void 684 _S_free_if_unref(_Rope_RopeRep* __t) 685 { 686 if (0 != __t && 0 == __t->_M_ref_count) 687 __t->_M_free_tree(); 688 } 689 # else /* __GC */ 690 void _M_unref_nonnil() { } 691 void _M_ref_nonnil() { } 692 static void _S_unref(_Rope_RopeRep*) { } 693 static void _S_ref(_Rope_RopeRep*) { } 694 static void _S_free_if_unref(_Rope_RopeRep*) { } 695 # endif 696 protected: 697 _Rope_RopeRep& 698 operator=(const _Rope_RopeRep&); 699 700 _Rope_RopeRep(const _Rope_RopeRep&); 701 }; 702 703 template
704 struct _Rope_RopeLeaf 705 : public _Rope_RopeRep<_CharT, _Alloc> 706 { 707 typedef std::size_t size_type; 708 public: 709 // Apparently needed by VC++ 710 // The data fields of leaves are allocated with some 711 // extra space, to accommodate future growth and for basic 712 // character types, to hold a trailing eos character. 713 enum { _S_alloc_granularity = 8 }; 714 715 static size_type 716 _S_rounded_up_size(size_type __n) 717 { 718 size_type __size_with_eos; 719 720 if (_S_is_basic_char_type((_CharT*)0)) 721 __size_with_eos = __n + 1; 722 else 723 __size_with_eos = __n; 724 #ifdef __GC 725 return __size_with_eos; 726 #else 727 // Allow slop for in-place expansion. 728 return ((__size_with_eos + size_type(_S_alloc_granularity) - 1) 729 &~ (size_type(_S_alloc_granularity) - 1)); 730 #endif 731 } 732 __GC_CONST _CharT* _M_data; /* Not necessarily 0 terminated. */ 733 /* The allocated size is */ 734 /* _S_rounded_up_size(size), except */ 735 /* in the GC case, in which it */ 736 /* doesn't matter. */ 737 typedef typename _Rope_rep_base<_CharT,_Alloc>::allocator_type 738 allocator_type; 739 740 _Rope_RopeLeaf(__GC_CONST _CharT* __d, size_type __size, 741 const allocator_type& __a) 742 : _Rope_RopeRep<_CharT, _Alloc>(__detail::_S_leaf, 0, true, 743 __size, __a), _M_data(__d) 744 { 745 if (_S_is_basic_char_type((_CharT *)0)) 746 { 747 // already eos terminated. 748 this->_M_c_string = __d; 749 } 750 } 751 // The constructor assumes that d has been allocated with 752 // the proper allocator and the properly padded size. 753 // In contrast, the destructor deallocates the data: 754 #ifndef __GC 755 ~_Rope_RopeLeaf() throw() 756 { 757 if (_M_data != this->_M_c_string) 758 this->_M_free_c_string(); 759 760 this->__STL_FREE_STRING(_M_data, this->_M_size, this->_M_get_allocator()); 761 } 762 #endif 763 protected: 764 _Rope_RopeLeaf& 765 operator=(const _Rope_RopeLeaf&); 766 767 _Rope_RopeLeaf(const _Rope_RopeLeaf&); 768 }; 769 770 template
771 struct _Rope_RopeConcatenation 772 : public _Rope_RopeRep<_CharT, _Alloc> 773 { 774 public: 775 _Rope_RopeRep<_CharT, _Alloc>* _M_left; 776 _Rope_RopeRep<_CharT, _Alloc>* _M_right; 777 778 typedef typename _Rope_rep_base<_CharT, _Alloc>::allocator_type 779 allocator_type; 780 781 _Rope_RopeConcatenation(_Rope_RopeRep<_CharT, _Alloc>* __l, 782 _Rope_RopeRep<_CharT, _Alloc>* __r, 783 const allocator_type& __a) 784 : _Rope_RopeRep<_CharT, _Alloc>(__detail::_S_concat, 785 std::max(__l->_M_depth, 786 __r->_M_depth) + 1, 787 false, 788 __l->_M_size + __r->_M_size, __a), 789 _M_left(__l), _M_right(__r) 790 { } 791 #ifndef __GC 792 ~_Rope_RopeConcatenation() throw() 793 { 794 this->_M_free_c_string(); 795 _M_left->_M_unref_nonnil(); 796 _M_right->_M_unref_nonnil(); 797 } 798 #endif 799 protected: 800 _Rope_RopeConcatenation& 801 operator=(const _Rope_RopeConcatenation&); 802 803 _Rope_RopeConcatenation(const _Rope_RopeConcatenation&); 804 }; 805 806 template
807 struct _Rope_RopeFunction 808 : public _Rope_RopeRep<_CharT, _Alloc> 809 { 810 public: 811 char_producer<_CharT>* _M_fn; 812 #ifndef __GC 813 bool _M_delete_when_done; // Char_producer is owned by the 814 // rope and should be explicitly 815 // deleted when the rope becomes 816 // inaccessible. 817 #else 818 // In the GC case, we either register the rope for 819 // finalization, or not. Thus the field is unnecessary; 820 // the information is stored in the collector data structures. 821 // We do need a finalization procedure to be invoked by the 822 // collector. 823 static void 824 _S_fn_finalization_proc(void * __tree, void *) 825 { delete ((_Rope_RopeFunction *)__tree) -> _M_fn; } 826 #endif 827 typedef typename _Rope_rep_base<_CharT, _Alloc>::allocator_type 828 allocator_type; 829 830 _Rope_RopeFunction(char_producer<_CharT>* __f, std::size_t __size, 831 bool __d, const allocator_type& __a) 832 : _Rope_RopeRep<_CharT, _Alloc>(__detail::_S_function, 0, true, __size, __a) 833 , _M_fn(__f) 834 #ifndef __GC 835 , _M_delete_when_done(__d) 836 #endif 837 { 838 #ifdef __GC 839 if (__d) 840 { 841 GC_REGISTER_FINALIZER(this, _Rope_RopeFunction:: 842 _S_fn_finalization_proc, 0, 0, 0); 843 } 844 #endif 845 } 846 #ifndef __GC 847 ~_Rope_RopeFunction() throw() 848 { 849 this->_M_free_c_string(); 850 if (_M_delete_when_done) 851 delete _M_fn; 852 } 853 # endif 854 protected: 855 _Rope_RopeFunction& 856 operator=(const _Rope_RopeFunction&); 857 858 _Rope_RopeFunction(const _Rope_RopeFunction&); 859 }; 860 // Substring results are usually represented using just 861 // concatenation nodes. But in the case of very long flat ropes 862 // or ropes with a functional representation that isn't practical. 863 // In that case, we represent the __result as a special case of 864 // RopeFunction, whose char_producer points back to the rope itself. 865 // In all cases except repeated substring operations and 866 // deallocation, we treat the __result as a RopeFunction. 867 template
868 struct _Rope_RopeSubstring 869 : public _Rope_RopeFunction<_CharT, _Alloc>, 870 public char_producer<_CharT> 871 { 872 typedef std::size_t size_type; 873 public: 874 // XXX this whole class should be rewritten. 875 _Rope_RopeRep<_CharT,_Alloc>* _M_base; // not 0 876 size_type _M_start; 877 878 virtual void 879 operator()(size_type __start_pos, size_type __req_len, 880 _CharT* __buffer) 881 { 882 switch(_M_base->_M_tag) 883 { 884 case __detail::_S_function: 885 case __detail::_S_substringfn: 886 { 887 char_producer<_CharT>* __fn = 888 ((_Rope_RopeFunction<_CharT,_Alloc>*)_M_base)->_M_fn; 889 (*__fn)(__start_pos + _M_start, __req_len, __buffer); 890 } 891 break; 892 case __detail::_S_leaf: 893 { 894 __GC_CONST _CharT* __s = 895 ((_Rope_RopeLeaf<_CharT,_Alloc>*)_M_base)->_M_data; 896 uninitialized_copy_n(__s + __start_pos + _M_start, __req_len, 897 __buffer); 898 } 899 break; 900 default: 901 break; 902 } 903 } 904 905 typedef typename _Rope_rep_base<_CharT, _Alloc>::allocator_type 906 allocator_type; 907 908 _Rope_RopeSubstring(_Rope_RopeRep<_CharT, _Alloc>* __b, size_type __s, 909 size_type __l, const allocator_type& __a) 910 : _Rope_RopeFunction<_CharT, _Alloc>(this, __l, false, __a), 911 char_producer<_CharT>(), _M_base(__b), _M_start(__s) 912 { 913 #ifndef __GC 914 _M_base->_M_ref_nonnil(); 915 #endif 916 this->_M_tag = __detail::_S_substringfn; 917 } 918 virtual ~_Rope_RopeSubstring() throw() 919 { 920 #ifndef __GC 921 _M_base->_M_unref_nonnil(); 922 // _M_free_c_string(); -- done by parent class 923 #endif 924 } 925 }; 926 927 // Self-destructing pointers to Rope_rep. 928 // These are not conventional smart pointers. Their 929 // only purpose in life is to ensure that unref is called 930 // on the pointer either at normal exit or if an exception 931 // is raised. It is the caller's responsibility to 932 // adjust reference counts when these pointers are initialized 933 // or assigned to. (This convention significantly reduces 934 // the number of potentially expensive reference count 935 // updates.) 936 #ifndef __GC 937 template
938 struct _Rope_self_destruct_ptr 939 { 940 _Rope_RopeRep<_CharT, _Alloc>* _M_ptr; 941 942 ~_Rope_self_destruct_ptr() 943 { _Rope_RopeRep<_CharT, _Alloc>::_S_unref(_M_ptr); } 944 #if __cpp_exceptions 945 _Rope_self_destruct_ptr() : _M_ptr(0) { } 946 #else 947 _Rope_self_destruct_ptr() { } 948 #endif 949 _Rope_self_destruct_ptr(_Rope_RopeRep<_CharT, _Alloc>* __p) 950 : _M_ptr(__p) { } 951 952 _Rope_RopeRep<_CharT, _Alloc>& 953 operator*() 954 { return *_M_ptr; } 955 956 _Rope_RopeRep<_CharT, _Alloc>* 957 operator->() 958 { return _M_ptr; } 959 960 operator _Rope_RopeRep<_CharT, _Alloc>*() 961 { return _M_ptr; } 962 963 _Rope_self_destruct_ptr& 964 operator=(_Rope_RopeRep<_CharT, _Alloc>* __x) 965 { _M_ptr = __x; return *this; } 966 }; 967 #endif 968 969 // Dereferencing a nonconst iterator has to return something 970 // that behaves almost like a reference. It's not possible to 971 // return an actual reference since assignment requires extra 972 // work. And we would get into the same problems as with the 973 // CD2 version of basic_string. 974 template
975 class _Rope_char_ref_proxy 976 { 977 friend class rope<_CharT, _Alloc>; 978 friend class _Rope_iterator<_CharT, _Alloc>; 979 friend class _Rope_char_ptr_proxy<_CharT, _Alloc>; 980 #ifdef __GC 981 typedef _Rope_RopeRep<_CharT, _Alloc>* _Self_destruct_ptr; 982 #else 983 typedef _Rope_self_destruct_ptr<_CharT, _Alloc> _Self_destruct_ptr; 984 #endif 985 typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep; 986 typedef rope<_CharT, _Alloc> _My_rope; 987 std::size_t _M_pos; 988 _CharT _M_current; 989 bool _M_current_valid; 990 _My_rope* _M_root; // The whole rope. 991 public: 992 _Rope_char_ref_proxy(_My_rope* __r, std::size_t __p) 993 : _M_pos(__p), _M_current(), _M_current_valid(false), _M_root(__r) { } 994 995 _Rope_char_ref_proxy(const _Rope_char_ref_proxy& __x) 996 : _M_pos(__x._M_pos), _M_current(__x._M_current), 997 _M_current_valid(false), _M_root(__x._M_root) { } 998 999 // Don't preserve cache if the reference can outlive the 1000 // expression. We claim that's not possible without calling 1001 // a copy constructor or generating reference to a proxy 1002 // reference. We declare the latter to have undefined semantics. 1003 _Rope_char_ref_proxy(_My_rope* __r, std::size_t __p, _CharT __c) 1004 : _M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) { } 1005 1006 inline operator _CharT () const; 1007 1008 _Rope_char_ref_proxy& 1009 operator=(_CharT __c); 1010 1011 _Rope_char_ptr_proxy<_CharT, _Alloc> operator&() const; 1012 1013 _Rope_char_ref_proxy& 1014 operator=(const _Rope_char_ref_proxy& __c) 1015 { return operator=((_CharT)__c); } 1016 }; 1017 1018 template
1019 inline void 1020 swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a, 1021 _Rope_char_ref_proxy <_CharT, __Alloc > __b) 1022 { 1023 _CharT __tmp = __a; 1024 __a = __b; 1025 __b = __tmp; 1026 } 1027 1028 template
1029 class _Rope_char_ptr_proxy 1030 { 1031 // XXX this class should be rewritten. 1032 friend class _Rope_char_ref_proxy<_CharT, _Alloc>; 1033 std::size_t _M_pos; 1034 rope<_CharT,_Alloc>* _M_root; // The whole rope. 1035 public: 1036 _Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x) 1037 : _M_pos(__x._M_pos), _M_root(__x._M_root) { } 1038 1039 _Rope_char_ptr_proxy(const _Rope_char_ptr_proxy& __x) 1040 : _M_pos(__x._M_pos), _M_root(__x._M_root) { } 1041 1042 _Rope_char_ptr_proxy() { } 1043 1044 _Rope_char_ptr_proxy(_CharT* __x) 1045 : _M_root(0), _M_pos(0) { } 1046 1047 _Rope_char_ptr_proxy& 1048 operator=(const _Rope_char_ptr_proxy& __x) 1049 { 1050 _M_pos = __x._M_pos; 1051 _M_root = __x._M_root; 1052 return *this; 1053 } 1054 1055 template
1056 friend bool 1057 operator==(const _Rope_char_ptr_proxy<_CharT2, _Alloc2>& __x, 1058 const _Rope_char_ptr_proxy<_CharT2, _Alloc2>& __y); 1059 1060 _Rope_char_ref_proxy<_CharT, _Alloc> operator*() const 1061 { return _Rope_char_ref_proxy<_CharT, _Alloc>(_M_root, _M_pos); } 1062 }; 1063 1064 // Rope iterators: 1065 // Unlike in the C version, we cache only part of the stack 1066 // for rope iterators, since they must be efficiently copyable. 1067 // When we run out of cache, we have to reconstruct the iterator 1068 // value. 1069 // Pointers from iterators are not included in reference counts. 1070 // Iterators are assumed to be thread private. Ropes can 1071 // be shared. 1072 1073 // Ignore warnings about std::iterator 1074 #pragma GCC diagnostic push 1075 #pragma GCC diagnostic ignored "-Wdeprecated-declarations" 1076 template
1077 class _Rope_iterator_base 1078 : public std::iterator
1079 { 1080 friend class rope<_CharT, _Alloc>; 1081 public: 1082 typedef _Alloc _allocator_type; // used in _Rope_rotate, VC++ workaround 1083 typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep; 1084 // Borland doesn't want this to be protected. 1085 protected: 1086 enum { _S_path_cache_len = 4 }; // Must be <= 9. 1087 enum { _S_iterator_buf_len = 15 }; 1088 std::size_t _M_current_pos; 1089 _RopeRep* _M_root; // The whole rope. 1090 std::size_t _M_leaf_pos; // Starting position for current leaf 1091 __GC_CONST _CharT* _M_buf_start; 1092 // Buffer possibly 1093 // containing current char. 1094 __GC_CONST _CharT* _M_buf_ptr; 1095 // Pointer to current char in buffer. 1096 // != 0 ==> buffer valid. 1097 __GC_CONST _CharT* _M_buf_end; 1098 // One past __last valid char in buffer. 1099 // What follows is the path cache. We go out of our 1100 // way to make this compact. 1101 // Path_end contains the bottom section of the path from 1102 // the root to the current leaf. 1103 const _RopeRep* _M_path_end[_S_path_cache_len]; 1104 int _M_leaf_index; // Last valid __pos in path_end; 1105 // _M_path_end[0] ... _M_path_end[leaf_index-1] 1106 // point to concatenation nodes. 1107 unsigned char _M_path_directions; 1108 // (path_directions >> __i) & 1 is 1 1109 // iff we got from _M_path_end[leaf_index - __i - 1] 1110 // to _M_path_end[leaf_index - __i] by going to the 1111 // __right. Assumes path_cache_len <= 9. 1112 _CharT _M_tmp_buf[_S_iterator_buf_len]; 1113 // Short buffer for surrounding chars. 1114 // This is useful primarily for 1115 // RopeFunctions. We put the buffer 1116 // here to avoid locking in the 1117 // multithreaded case. 1118 // The cached path is generally assumed to be valid 1119 // only if the buffer is valid. 1120 static void _S_setbuf(_Rope_iterator_base& __x); 1121 // Set buffer contents given 1122 // path cache. 1123 static void _S_setcache(_Rope_iterator_base& __x); 1124 // Set buffer contents and 1125 // path cache. 1126 static void _S_setcache_for_incr(_Rope_iterator_base& __x); 1127 // As above, but assumes path 1128 // cache is valid for previous posn. 1129 _Rope_iterator_base() { } 1130 1131 _Rope_iterator_base(_RopeRep* __root, std::size_t __pos) 1132 : _M_current_pos(__pos), _M_root(__root), _M_buf_ptr(0) { } 1133 1134 void _M_incr(std::size_t __n); 1135 void _M_decr(std::size_t __n); 1136 public: 1137 std::size_t 1138 index() const 1139 { return _M_current_pos; } 1140 1141 _Rope_iterator_base(const _Rope_iterator_base& __x) 1142 { 1143 if (0 != __x._M_buf_ptr && __x._M_buf_start != __x._M_tmp_buf) 1144 *this = __x; 1145 else 1146 { 1147 _M_current_pos = __x._M_current_pos; 1148 _M_root = __x._M_root; 1149 _M_buf_ptr = 0; 1150 } 1151 } 1152 }; 1153 #pragma GCC diagnostic pop 1154 1155 template
1156 class _Rope_iterator; 1157 1158 template
1159 class _Rope_const_iterator 1160 : public _Rope_iterator_base<_CharT, _Alloc> 1161 { 1162 friend class rope<_CharT, _Alloc>; 1163 protected: 1164 typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep; 1165 // The one from the base class may not be directly visible. 1166 _Rope_const_iterator(const _RopeRep* __root, std::size_t __pos) 1167 : _Rope_iterator_base<_CharT, _Alloc>(const_cast<_RopeRep*>(__root), 1168 __pos) 1169 // Only nonconst iterators modify root ref count 1170 { } 1171 public: 1172 typedef _CharT reference; // Really a value. Returning a reference 1173 // Would be a mess, since it would have 1174 // to be included in refcount. 1175 typedef const _CharT* pointer; 1176 1177 public: 1178 _Rope_const_iterator() { } 1179 1180 _Rope_const_iterator(const _Rope_const_iterator& __x) 1181 : _Rope_iterator_base<_CharT,_Alloc>(__x) { } 1182 1183 _Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x); 1184 1185 _Rope_const_iterator(const rope<_CharT, _Alloc>& __r, std::size_t __pos) 1186 : _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr, __pos) { } 1187 1188 _Rope_const_iterator& 1189 operator=(const _Rope_const_iterator& __x) 1190 { 1191 if (0 != __x._M_buf_ptr && __x._M_buf_start != __x._M_tmp_buf) 1192 *(static_cast<_Rope_iterator_base<_CharT, _Alloc>*>(this)) = __x; 1193 else 1194 { 1195 this->_M_current_pos = __x._M_current_pos; 1196 this->_M_root = __x._M_root; 1197 this->_M_buf_ptr = 0; 1198 } 1199 return(*this); 1200 } 1201 1202 reference 1203 operator*() 1204 { 1205 if (0 == this->_M_buf_ptr) 1206 this->_S_setcache(*this); 1207 return *this->_M_buf_ptr; 1208 } 1209 1210 // Without this const version, Rope iterators do not meet the 1211 // requirements of an Input Iterator. 1212 reference 1213 operator*() const 1214 { 1215 return *const_cast<_Rope_const_iterator&>(*this); 1216 } 1217 1218 _Rope_const_iterator& 1219 operator++() 1220 { 1221 __GC_CONST _CharT* __next; 1222 if (0 != this->_M_buf_ptr 1223 && (__next = this->_M_buf_ptr + 1) < this->_M_buf_end) 1224 { 1225 this->_M_buf_ptr = __next; 1226 ++this->_M_current_pos; 1227 } 1228 else 1229 this->_M_incr(1); 1230 return *this; 1231 } 1232 1233 _Rope_const_iterator& 1234 operator+=(std::ptrdiff_t __n) 1235 { 1236 if (__n >= 0) 1237 this->_M_incr(__n); 1238 else 1239 this->_M_decr(-__n); 1240 return *this; 1241 } 1242 1243 _Rope_const_iterator& 1244 operator--() 1245 { 1246 this->_M_decr(1); 1247 return *this; 1248 } 1249 1250 _Rope_const_iterator& 1251 operator-=(std::ptrdiff_t __n) 1252 { 1253 if (__n >= 0) 1254 this->_M_decr(__n); 1255 else 1256 this->_M_incr(-__n); 1257 return *this; 1258 } 1259 1260 _Rope_const_iterator 1261 operator++(int) 1262 { 1263 std::size_t __old_pos = this->_M_current_pos; 1264 this->_M_incr(1); 1265 return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos); 1266 // This makes a subsequent dereference expensive. 1267 // Perhaps we should instead copy the iterator 1268 // if it has a valid cache? 1269 } 1270 1271 _Rope_const_iterator 1272 operator--(int) 1273 { 1274 std::size_t __old_pos = this->_M_current_pos; 1275 this->_M_decr(1); 1276 return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos); 1277 } 1278 1279 template
1280 friend _Rope_const_iterator<_CharT2, _Alloc2> 1281 operator-(const _Rope_const_iterator<_CharT2, _Alloc2>& __x, 1282 std::ptrdiff_t __n); 1283 1284 template
1285 friend _Rope_const_iterator<_CharT2, _Alloc2> 1286 operator+(const _Rope_const_iterator<_CharT2, _Alloc2>& __x, 1287 std::ptrdiff_t __n); 1288 1289 template
1290 friend _Rope_const_iterator<_CharT2, _Alloc2> 1291 operator+(std::ptrdiff_t __n, 1292 const _Rope_const_iterator<_CharT2, _Alloc2>& __x); 1293 1294 reference 1295 operator[](std::size_t __n) 1296 { return rope<_CharT, _Alloc>::_S_fetch(this->_M_root, 1297 this->_M_current_pos + __n); } 1298 1299 template
1300 friend bool 1301 operator==(const _Rope_const_iterator<_CharT2, _Alloc2>& __x, 1302 const _Rope_const_iterator<_CharT2, _Alloc2>& __y); 1303 1304 template
1305 friend bool 1306 operator<(const _Rope_const_iterator<_CharT2, _Alloc2>& __x, 1307 const _Rope_const_iterator<_CharT2, _Alloc2>& __y); 1308 1309 template
1310 friend std::ptrdiff_t 1311 operator-(const _Rope_const_iterator<_CharT2, _Alloc2>& __x, 1312 const _Rope_const_iterator<_CharT2, _Alloc2>& __y); 1313 }; 1314 1315 template
1316 class _Rope_iterator 1317 : public _Rope_iterator_base<_CharT, _Alloc> 1318 { 1319 friend class rope<_CharT, _Alloc>; 1320 protected: 1321 typedef typename _Rope_iterator_base<_CharT, _Alloc>::_RopeRep _RopeRep; 1322 rope<_CharT, _Alloc>* _M_root_rope; 1323 1324 // root is treated as a cached version of this, and is used to 1325 // detect changes to the underlying rope. 1326 1327 // Root is included in the reference count. This is necessary 1328 // so that we can detect changes reliably. Unfortunately, it 1329 // requires careful bookkeeping for the nonGC case. 1330 _Rope_iterator(rope<_CharT, _Alloc>* __r, std::size_t __pos) 1331 : _Rope_iterator_base<_CharT, _Alloc>(__r->_M_tree_ptr, __pos), 1332 _M_root_rope(__r) 1333 { _RopeRep::_S_ref(this->_M_root); 1334 if (!(__r -> empty())) 1335 this->_S_setcache(*this); 1336 } 1337 1338 void _M_check(); 1339 public: 1340 typedef _Rope_char_ref_proxy<_CharT, _Alloc> reference; 1341 typedef _Rope_char_ref_proxy<_CharT, _Alloc>* pointer; 1342 1343 rope<_CharT, _Alloc>& 1344 container() 1345 { return *_M_root_rope; } 1346 1347 _Rope_iterator() 1348 { 1349 this->_M_root = 0; // Needed for reference counting. 1350 } 1351 1352 _Rope_iterator(const _Rope_iterator& __x) 1353 : _Rope_iterator_base<_CharT, _Alloc>(__x) 1354 { 1355 _M_root_rope = __x._M_root_rope; 1356 _RopeRep::_S_ref(this->_M_root); 1357 } 1358 1359 _Rope_iterator(rope<_CharT, _Alloc>& __r, std::size_t __pos); 1360 1361 ~_Rope_iterator() 1362 { _RopeRep::_S_unref(this->_M_root); } 1363 1364 _Rope_iterator& 1365 operator=(const _Rope_iterator& __x) 1366 { 1367 _RopeRep* __old = this->_M_root; 1368 1369 _RopeRep::_S_ref(__x._M_root); 1370 if (0 != __x._M_buf_ptr && __x._M_buf_start != __x._M_tmp_buf) 1371 { 1372 _M_root_rope = __x._M_root_rope; 1373 *(static_cast<_Rope_iterator_base<_CharT, _Alloc>*>(this)) = __x; 1374 } 1375 else 1376 { 1377 this->_M_current_pos = __x._M_current_pos; 1378 this->_M_root = __x._M_root; 1379 _M_root_rope = __x._M_root_rope; 1380 this->_M_buf_ptr = 0; 1381 } 1382 _RopeRep::_S_unref(__old); 1383 return(*this); 1384 } 1385 1386 reference 1387 operator*() 1388 { 1389 _M_check(); 1390 if (0 == this->_M_buf_ptr) 1391 return _Rope_char_ref_proxy<_CharT, _Alloc>(_M_root_rope, 1392 this->_M_current_pos); 1393 else 1394 return _Rope_char_ref_proxy<_CharT, _Alloc>(_M_root_rope, 1395 this->_M_current_pos, 1396 *this->_M_buf_ptr); 1397 } 1398 1399 // See above comment. 
1400 reference 1401 operator*() const 1402 { 1403 return *const_cast<_Rope_iterator&>(*this); 1404 } 1405 1406 _Rope_iterator& 1407 operator++() 1408 { 1409 this->_M_incr(1); 1410 return *this; 1411 } 1412 1413 _Rope_iterator& 1414 operator+=(std::ptrdiff_t __n) 1415 { 1416 if (__n >= 0) 1417 this->_M_incr(__n); 1418 else 1419 this->_M_decr(-__n); 1420 return *this; 1421 } 1422 1423 _Rope_iterator& 1424 operator--() 1425 { 1426 this->_M_decr(1); 1427 return *this; 1428 } 1429 1430 _Rope_iterator& 1431 operator-=(std::ptrdiff_t __n) 1432 { 1433 if (__n >= 0) 1434 this->_M_decr(__n); 1435 else 1436 this->_M_incr(-__n); 1437 return *this; 1438 } 1439 1440 _Rope_iterator 1441 operator++(int) 1442 { 1443 std::size_t __old_pos = this->_M_current_pos; 1444 this->_M_incr(1); 1445 return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos); 1446 } 1447 1448 _Rope_iterator 1449 operator--(int) 1450 { 1451 std::size_t __old_pos = this->_M_current_pos; 1452 this->_M_decr(1); 1453 return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos); 1454 } 1455 1456 reference 1457 operator[](std::ptrdiff_t __n) 1458 { return _Rope_char_ref_proxy<_CharT, _Alloc>(_M_root_rope, 1459 this->_M_current_pos 1460 + __n); } 1461 1462 template
1463 friend bool 1464 operator==(const _Rope_iterator<_CharT2, _Alloc2>& __x, 1465 const _Rope_iterator<_CharT2, _Alloc2>& __y); 1466 1467 template
1468 friend bool 1469 operator<(const _Rope_iterator<_CharT2, _Alloc2>& __x, 1470 const _Rope_iterator<_CharT2, _Alloc2>& __y); 1471 1472 template
1473 friend std::ptrdiff_t 1474 operator-(const _Rope_iterator<_CharT2, _Alloc2>& __x, 1475 const _Rope_iterator<_CharT2, _Alloc2>& __y); 1476 1477 template
1478 friend _Rope_iterator<_CharT2, _Alloc2> 1479 operator-(const _Rope_iterator<_CharT2, _Alloc2>& __x, 1480 std::ptrdiff_t __n); 1481 1482 template
1483 friend _Rope_iterator<_CharT2, _Alloc2> 1484 operator+(const _Rope_iterator<_CharT2, _Alloc2>& __x, 1485 std::ptrdiff_t __n); 1486 1487 template
1488 friend _Rope_iterator<_CharT2, _Alloc2> 1489 operator+(std::ptrdiff_t __n, 1490 const _Rope_iterator<_CharT2, _Alloc2>& __x); 1491 }; 1492 1493 1494 template
1495 struct _Rope_base 1496 : public _Alloc 1497 { 1498 typedef _Alloc allocator_type; 1499 1500 allocator_type 1501 get_allocator() const 1502 { return *static_cast
(this); } 1503 1504 allocator_type& 1505 _M_get_allocator() 1506 { return *static_cast<_Alloc*>(this); } 1507 1508 const allocator_type& 1509 _M_get_allocator() const 1510 { return *static_cast
(this); } 1511 1512 typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep; 1513 // The one in _Base may not be visible due to template rules. 1514 1515 _Rope_base(_RopeRep* __t, const allocator_type&) 1516 : _M_tree_ptr(__t) { } 1517 1518 _Rope_base(const allocator_type&) { } 1519 1520 // The only data member of a rope: 1521 _RopeRep *_M_tree_ptr; 1522 1523 #define __ROPE_DEFINE_ALLOC(_Tp, __name) \ 1524 typedef typename \ 1525 __alloc_traits<_Alloc>::template rebind<_Tp>::other __name##Alloc; \ 1526 static _Tp* __name##_allocate(std::size_t __n) \ 1527 { return __name##Alloc().allocate(__n); } \ 1528 static void __name##_deallocate(_Tp *__p, std::size_t __n) \ 1529 { __name##Alloc().deallocate(__p, __n); } 1530 __ROPE_DEFINE_ALLOCS(_Alloc) 1531 #undef __ROPE_DEFINE_ALLOC 1532 1533 protected: 1534 _Rope_base& 1535 operator=(const _Rope_base&); 1536 1537 _Rope_base(const _Rope_base&); 1538 }; 1539 1540 /** 1541 * This is an SGI extension. 1542 * @ingroup SGIextensions 1543 * @doctodo 1544 */ 1545 template
1546 class rope : public _Rope_base<_CharT, _Alloc> 1547 { 1548 public: 1549 typedef _CharT value_type; 1550 typedef std::ptrdiff_t difference_type; 1551 typedef std::size_t size_type; 1552 typedef _CharT const_reference; 1553 typedef const _CharT* const_pointer; 1554 typedef _Rope_iterator<_CharT, _Alloc> iterator; 1555 typedef _Rope_const_iterator<_CharT, _Alloc> const_iterator; 1556 typedef _Rope_char_ref_proxy<_CharT, _Alloc> reference; 1557 typedef _Rope_char_ptr_proxy<_CharT, _Alloc> pointer; 1558 1559 friend class _Rope_iterator<_CharT, _Alloc>; 1560 friend class _Rope_const_iterator<_CharT, _Alloc>; 1561 friend struct _Rope_RopeRep<_CharT, _Alloc>; 1562 friend class _Rope_iterator_base<_CharT, _Alloc>; 1563 friend class _Rope_char_ptr_proxy<_CharT, _Alloc>; 1564 friend class _Rope_char_ref_proxy<_CharT, _Alloc>; 1565 friend struct _Rope_RopeSubstring<_CharT, _Alloc>; 1566 1567 protected: 1568 typedef _Rope_base<_CharT, _Alloc> _Base; 1569 typedef typename _Base::allocator_type allocator_type; 1570 using _Base::_M_tree_ptr; 1571 using _Base::get_allocator; 1572 using _Base::_M_get_allocator; 1573 typedef __GC_CONST _CharT* _Cstrptr; 1574 1575 static _CharT _S_empty_c_str[1]; 1576 1577 static bool 1578 _S_is0(_CharT __c) 1579 { return __c == _S_eos((_CharT*)0); } 1580 1581 enum { _S_copy_max = 23 }; 1582 // For strings shorter than _S_copy_max, we copy to 1583 // concatenate. 1584 1585 typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep; 1586 typedef _Rope_RopeConcatenation<_CharT, _Alloc> _RopeConcatenation; 1587 typedef _Rope_RopeLeaf<_CharT, _Alloc> _RopeLeaf; 1588 typedef _Rope_RopeFunction<_CharT, _Alloc> _RopeFunction; 1589 typedef _Rope_RopeSubstring<_CharT, _Alloc> _RopeSubstring; 1590 1591 // Retrieve a character at the indicated position. 1592 static _CharT _S_fetch(_RopeRep* __r, size_type __pos); 1593 1594 #ifndef __GC 1595 // Obtain a pointer to the character at the indicated position. 1596 // The pointer can be used to change the character. 1597 // If such a pointer cannot be produced, as is frequently the 1598 // case, 0 is returned instead. 1599 // (Returns nonzero only if all nodes in the path have a refcount 1600 // of 1.) 1601 static _CharT* _S_fetch_ptr(_RopeRep* __r, size_type __pos); 1602 #endif 1603 1604 static bool 1605 _S_apply_to_pieces(// should be template parameter 1606 _Rope_char_consumer<_CharT>& __c, 1607 const _RopeRep* __r, 1608 size_type __begin, size_type __end); 1609 // begin and end are assumed to be in range. 1610 1611 #ifndef __GC 1612 static void 1613 _S_unref(_RopeRep* __t) 1614 { _RopeRep::_S_unref(__t); } 1615 1616 static void 1617 _S_ref(_RopeRep* __t) 1618 { _RopeRep::_S_ref(__t); } 1619 1620 #else /* __GC */ 1621 static void _S_unref(_RopeRep*) { } 1622 static void _S_ref(_RopeRep*) { } 1623 #endif 1624 1625 #ifdef __GC 1626 typedef _Rope_RopeRep<_CharT, _Alloc>* _Self_destruct_ptr; 1627 #else 1628 typedef _Rope_self_destruct_ptr<_CharT, _Alloc> _Self_destruct_ptr; 1629 #endif 1630 1631 // _Result is counted in refcount. 1632 static _RopeRep* _S_substring(_RopeRep* __base, 1633 size_type __start, size_type __endp1); 1634 1635 static _RopeRep* _S_concat_char_iter(_RopeRep* __r, 1636 const _CharT* __iter, 1637 size_type __slen, 1638 allocator_type& __a); 1639 // Concatenate rope and char ptr, copying __iter. 1640 // Should really take an arbitrary iterator. 1641 // Result is counted in refcount. 
1642 static _RopeRep* _S_destr_concat_char_iter(_RopeRep* __r, 1643 const _CharT* __iter, 1644 size_type __slen, 1645 allocator_type& __a) 1646 // As above, but one reference to __r is about to be 1647 // destroyed. Thus the pieces may be recycled if all 1648 // relevant reference counts are 1. 1649 #ifdef __GC 1650 // We can't really do anything since refcounts are unavailable. 1651 { return _S_concat_char_iter(__r, __iter, __slen, __a); } 1652 #else 1653 ; 1654 #endif 1655 1656 static _RopeRep* _S_concat(_RopeRep* __left, _RopeRep* __right); 1657 // General concatenation on _RopeRep. _Result 1658 // has refcount of 1. Adjusts argument refcounts. 1659 1660 public: 1661 void 1662 apply_to_pieces(size_type __begin, size_type __end, 1663 _Rope_char_consumer<_CharT>& __c) const 1664 { _S_apply_to_pieces(__c, this->_M_tree_ptr, __begin, __end); } 1665 1666 protected: 1667 1668 static size_type 1669 _S_rounded_up_size(size_type __n) 1670 { return _RopeLeaf::_S_rounded_up_size(__n); } 1671 1672 static size_type 1673 _S_allocated_capacity(size_type __n) 1674 { 1675 if (_S_is_basic_char_type((_CharT*)0)) 1676 return _S_rounded_up_size(__n) - 1; 1677 else 1678 return _S_rounded_up_size(__n); 1679 1680 } 1681 1682 // Allocate and construct a RopeLeaf using the supplied allocator 1683 // Takes ownership of s instead of copying. 1684 static _RopeLeaf* 1685 _S_new_RopeLeaf(__GC_CONST _CharT *__s, 1686 size_type __size, allocator_type& __a) 1687 { 1688 _RopeLeaf* __space = typename _Base::_LAlloc(__a).allocate(1); 1689 return new(__space) _RopeLeaf(__s, __size, __a); 1690 } 1691 1692 static _RopeConcatenation* 1693 _S_new_RopeConcatenation(_RopeRep* __left, _RopeRep* __right, 1694 allocator_type& __a) 1695 { 1696 _RopeConcatenation* __space = typename _Base::_CAlloc(__a).allocate(1); 1697 return new(__space) _RopeConcatenation(__left, __right, __a); 1698 } 1699 1700 static _RopeFunction* 1701 _S_new_RopeFunction(char_producer<_CharT>* __f, 1702 size_type __size, bool __d, allocator_type& __a) 1703 { 1704 _RopeFunction* __space = typename _Base::_FAlloc(__a).allocate(1); 1705 return new(__space) _RopeFunction(__f, __size, __d, __a); 1706 } 1707 1708 static _RopeSubstring* 1709 _S_new_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_type __s, 1710 size_type __l, allocator_type& __a) 1711 { 1712 _RopeSubstring* __space = typename _Base::_SAlloc(__a).allocate(1); 1713 return new(__space) _RopeSubstring(__b, __s, __l, __a); 1714 } 1715 1716 static _RopeLeaf* 1717 _S_RopeLeaf_from_unowned_char_ptr(const _CharT *__s, 1718 size_type __size, allocator_type& __a) 1719 #define __STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __size, __a) \ 1720 _S_RopeLeaf_from_unowned_char_ptr(__s, __size, __a) 1721 { 1722 if (0 == __size) 1723 return 0; 1724 _CharT* __buf = __a.allocate(_S_rounded_up_size(__size)); 1725 1726 __uninitialized_copy_n_a(__s, __size, __buf, __a); 1727 _S_cond_store_eos(__buf[__size]); 1728 __try 1729 { return _S_new_RopeLeaf(__buf, __size, __a); } 1730 __catch(...) 1731 { 1732 _RopeRep::__STL_FREE_STRING(__buf, __size, __a); 1733 __throw_exception_again; 1734 } 1735 } 1736 1737 // Concatenation of nonempty strings. 1738 // Always builds a concatenation node. 1739 // Rebalances if the result is too deep. 1740 // Result has refcount 1. 1741 // Does not increment left and right ref counts even though 1742 // they are referenced. 
1743 static _RopeRep* 1744 _S_tree_concat(_RopeRep* __left, _RopeRep* __right); 1745 1746 // Concatenation helper functions 1747 static _RopeLeaf* 1748 _S_leaf_concat_char_iter(_RopeLeaf* __r, 1749 const _CharT* __iter, size_type __slen); 1750 // Concatenate by copying leaf. 1751 // should take an arbitrary iterator 1752 // result has refcount 1. 1753 #ifndef __GC 1754 static _RopeLeaf* 1755 _S_destr_leaf_concat_char_iter(_RopeLeaf* __r, 1756 const _CharT* __iter, size_type __slen); 1757 // A version that potentially clobbers __r if __r->_M_ref_count == 1. 1758 #endif 1759 1760 private: 1761 1762 static size_type _S_char_ptr_len(const _CharT* __s); 1763 // slightly generalized strlen 1764 1765 rope(_RopeRep* __t, const allocator_type& __a = allocator_type()) 1766 : _Base(__t, __a) { } 1767 1768 1769 // Copy __r to the _CharT buffer. 1770 // Returns __buffer + __r->_M_size. 1771 // Assumes that buffer is uninitialized. 1772 static _CharT* _S_flatten(_RopeRep* __r, _CharT* __buffer); 1773 1774 // Again, with explicit starting position and length. 1775 // Assumes that buffer is uninitialized. 1776 static _CharT* _S_flatten(_RopeRep* __r, 1777 size_type __start, size_type __len, 1778 _CharT* __buffer); 1779 1780 static const unsigned long 1781 _S_min_len[__detail::_S_max_rope_depth + 1]; 1782 1783 static bool 1784 _S_is_balanced(_RopeRep* __r) 1785 { return (__r->_M_size >= _S_min_len[__r->_M_depth]); } 1786 1787 static bool 1788 _S_is_almost_balanced(_RopeRep* __r) 1789 { return (__r->_M_depth == 0 1790 || __r->_M_size >= _S_min_len[__r->_M_depth - 1]); } 1791 1792 static bool 1793 _S_is_roughly_balanced(_RopeRep* __r) 1794 { return (__r->_M_depth <= 1 1795 || __r->_M_size >= _S_min_len[__r->_M_depth - 2]); } 1796 1797 // Assumes the result is not empty. 1798 static _RopeRep* 1799 _S_concat_and_set_balanced(_RopeRep* __left, _RopeRep* __right) 1800 { 1801 _RopeRep* __result = _S_concat(__left, __right); 1802 if (_S_is_balanced(__result)) 1803 __result->_M_is_balanced = true; 1804 return __result; 1805 } 1806 1807 // The basic rebalancing operation. Logically copies the 1808 // rope. The result has refcount of 1. The client will 1809 // usually decrement the reference count of __r. 1810 // The result is within height 2 of balanced by the above 1811 // definition. 1812 static _RopeRep* _S_balance(_RopeRep* __r); 1813 1814 // Add all unbalanced subtrees to the forest of balanced trees. 1815 // Used only by balance. 1816 static void _S_add_to_forest(_RopeRep*__r, _RopeRep** __forest); 1817 1818 // Add __r to forest, assuming __r is already balanced. 1819 static void _S_add_leaf_to_forest(_RopeRep* __r, _RopeRep** __forest); 1820 1821 // Print to stdout, exposing structure 1822 static void _S_dump(_RopeRep* __r, int __indent = 0); 1823 1824 // Return -1, 0, or 1 if __x < __y, __x == __y, or __x > __y resp. 1825 static int _S_compare(const _RopeRep* __x, const _RopeRep* __y); 1826 1827 public: 1828 _GLIBCXX_NODISCARD bool 1829 empty() const 1830 { return 0 == this->_M_tree_ptr; } 1831 1832 // Comparison member function. This is public only for those 1833 // clients that need a ternary comparison. Others 1834 // should use the comparison operators below. 
1835 int 1836 compare(const rope& __y) const 1837 { return _S_compare(this->_M_tree_ptr, __y._M_tree_ptr); } 1838 1839 rope(const _CharT* __s, const allocator_type& __a = allocator_type()) 1840 : _Base(__a) 1841 { 1842 this->_M_tree_ptr = 1843 __STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, _S_char_ptr_len(__s), 1844 _M_get_allocator()); 1845 } 1846 1847 rope(const _CharT* __s, size_type __len, 1848 const allocator_type& __a = allocator_type()) 1849 : _Base(__a) 1850 { 1851 this->_M_tree_ptr = 1852 __STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __len, _M_get_allocator()); 1853 } 1854 1855 // Should perhaps be templatized with respect to the iterator type 1856 // and use Sequence_buffer. (It should perhaps use sequence_buffer 1857 // even now.) 1858 rope(const _CharT* __s, const _CharT* __e, 1859 const allocator_type& __a = allocator_type()) 1860 : _Base(__a) 1861 { 1862 this->_M_tree_ptr = 1863 __STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __e - __s, _M_get_allocator()); 1864 } 1865 1866 rope(const const_iterator& __s, const const_iterator& __e, 1867 const allocator_type& __a = allocator_type()) 1868 : _Base(_S_substring(__s._M_root, __s._M_current_pos, 1869 __e._M_current_pos), __a) 1870 { } 1871 1872 rope(const iterator& __s, const iterator& __e, 1873 const allocator_type& __a = allocator_type()) 1874 : _Base(_S_substring(__s._M_root, __s._M_current_pos, 1875 __e._M_current_pos), __a) 1876 { } 1877 1878 rope(_CharT __c, const allocator_type& __a = allocator_type()) 1879 : _Base(__a) 1880 { 1881 _CharT* __buf = this->_Data_allocate(_S_rounded_up_size(1)); 1882 1883 __alloc_traits
::construct(_M_get_allocator(), 1884 __buf, __c); 1885 __try 1886 { 1887 this->_M_tree_ptr = _S_new_RopeLeaf(__buf, 1, 1888 _M_get_allocator()); 1889 } 1890 __catch(...) 1891 { 1892 _RopeRep::__STL_FREE_STRING(__buf, 1, _M_get_allocator()); 1893 __throw_exception_again; 1894 } 1895 } 1896 1897 rope(size_type __n, _CharT __c, 1898 const allocator_type& __a = allocator_type()); 1899 1900 rope(const allocator_type& __a = allocator_type()) 1901 : _Base(0, __a) { } 1902 1903 // Construct a rope from a function that can compute its members 1904 rope(char_producer<_CharT> *__fn, size_type __len, bool __delete_fn, 1905 const allocator_type& __a = allocator_type()) 1906 : _Base(__a) 1907 { 1908 this->_M_tree_ptr = (0 == __len) 1909 ? 0 1910 : _S_new_RopeFunction(__fn, __len, __delete_fn, _M_get_allocator()); 1911 } 1912 1913 rope(const rope& __x, const allocator_type& __a = allocator_type()) 1914 : _Base(__x._M_tree_ptr, __a) 1915 { _S_ref(this->_M_tree_ptr); } 1916 1917 ~rope() throw() 1918 { _S_unref(this->_M_tree_ptr); } 1919 1920 rope& 1921 operator=(const rope& __x) 1922 { 1923 _RopeRep* __old = this->_M_tree_ptr; 1924 this->_M_tree_ptr = __x._M_tree_ptr; 1925 _S_ref(this->_M_tree_ptr); 1926 _S_unref(__old); 1927 return *this; 1928 } 1929 1930 void 1931 clear() 1932 { 1933 _S_unref(this->_M_tree_ptr); 1934 this->_M_tree_ptr = 0; 1935 } 1936 1937 void 1938 push_back(_CharT __x) 1939 { 1940 allocator_type __a = _M_get_allocator(); 1941 _RopeRep* __old = this->_M_tree_ptr; 1942 this->_M_tree_ptr 1943 = _S_destr_concat_char_iter(this->_M_tree_ptr, &__x, 1, __a); 1944 _S_unref(__old); 1945 } 1946 1947 void 1948 pop_back() 1949 { 1950 _RopeRep* __old = this->_M_tree_ptr; 1951 this->_M_tree_ptr = _S_substring(this->_M_tree_ptr, 1952 0, this->_M_tree_ptr->_M_size - 1); 1953 _S_unref(__old); 1954 } 1955 1956 _CharT 1957 back() const 1958 { return _S_fetch(this->_M_tree_ptr, this->_M_tree_ptr->_M_size - 1); } 1959 1960 void 1961 push_front(_CharT __x) 1962 { 1963 _RopeRep* __old = this->_M_tree_ptr; 1964 _RopeRep* __left = 1965 __STL_ROPE_FROM_UNOWNED_CHAR_PTR(&__x, 1, _M_get_allocator()); 1966 __try 1967 { 1968 this->_M_tree_ptr = _S_concat(__left, this->_M_tree_ptr); 1969 _S_unref(__old); 1970 _S_unref(__left); 1971 } 1972 __catch(...) 1973 { 1974 _S_unref(__left); 1975 __throw_exception_again; 1976 } 1977 } 1978 1979 void 1980 pop_front() 1981 { 1982 _RopeRep* __old = this->_M_tree_ptr; 1983 this->_M_tree_ptr 1984 = _S_substring(this->_M_tree_ptr, 1, this->_M_tree_ptr->_M_size); 1985 _S_unref(__old); 1986 } 1987 1988 _CharT 1989 front() const 1990 { return _S_fetch(this->_M_tree_ptr, 0); } 1991 1992 void 1993 balance() 1994 { 1995 _RopeRep* __old = this->_M_tree_ptr; 1996 this->_M_tree_ptr = _S_balance(this->_M_tree_ptr); 1997 _S_unref(__old); 1998 } 1999 2000 void 2001 copy(_CharT* __buffer) const 2002 { 2003 _Destroy_const(__buffer, __buffer + size(), _M_get_allocator()); 2004 _S_flatten(this->_M_tree_ptr, __buffer); 2005 } 2006 2007 // This is the copy function from the standard, but 2008 // with the arguments reordered to make it consistent with the 2009 // rest of the interface. 2010 // Note that this guaranteed not to compile if the draft standard 2011 // order is assumed. 2012 size_type 2013 copy(size_type __pos, size_type __n, _CharT* __buffer) const 2014 { 2015 size_type __size = size(); 2016 size_type __len = (__pos + __n > __size? 
__size - __pos : __n); 2017 2018 _Destroy_const(__buffer, __buffer + __len, _M_get_allocator()); 2019 _S_flatten(this->_M_tree_ptr, __pos, __len, __buffer); 2020 return __len; 2021 } 2022 2023 // Print to stdout, exposing structure. May be useful for 2024 // performance debugging. 2025 void 2026 dump() 2027 { _S_dump(this->_M_tree_ptr); } 2028 2029 // Convert to 0 terminated string in new allocated memory. 2030 // Embedded 0s in the input do not terminate the copy. 2031 const _CharT* c_str() const; 2032 2033 // As above, but also use the flattened representation as 2034 // the new rope representation. 2035 const _CharT* replace_with_c_str(); 2036 2037 // Reclaim memory for the c_str generated flattened string. 2038 // Intentionally undocumented, since it's hard to say when this 2039 // is safe for multiple threads. 2040 void 2041 delete_c_str () 2042 { 2043 if (0 == this->_M_tree_ptr) 2044 return; 2045 if (__detail::_S_leaf == this->_M_tree_ptr->_M_tag && 2046 ((_RopeLeaf*)this->_M_tree_ptr)->_M_data == 2047 this->_M_tree_ptr->_M_c_string) 2048 { 2049 // Representation shared 2050 return; 2051 } 2052 #ifndef __GC 2053 this->_M_tree_ptr->_M_free_c_string(); 2054 #endif 2055 this->_M_tree_ptr->_M_c_string = 0; 2056 } 2057 2058 _CharT 2059 operator[] (size_type __pos) const 2060 { return _S_fetch(this->_M_tree_ptr, __pos); } 2061 2062 _CharT 2063 at(size_type __pos) const 2064 { 2065 // if (__pos >= size()) throw out_of_range; // XXX 2066 return (*this)[__pos]; 2067 } 2068 2069 const_iterator 2070 begin() const 2071 { return(const_iterator(this->_M_tree_ptr, 0)); } 2072 2073 // An easy way to get a const iterator from a non-const container. 2074 const_iterator 2075 const_begin() const 2076 { return(const_iterator(this->_M_tree_ptr, 0)); } 2077 2078 const_iterator 2079 end() const 2080 { return(const_iterator(this->_M_tree_ptr, size())); } 2081 2082 const_iterator 2083 const_end() const 2084 { return(const_iterator(this->_M_tree_ptr, size())); } 2085 2086 size_type 2087 size() const 2088 { return(0 == this->_M_tree_ptr? 0 : this->_M_tree_ptr->_M_size); } 2089 2090 size_type 2091 length() const 2092 { return size(); } 2093 2094 size_type 2095 max_size() const 2096 { 2097 return _S_min_len[int(__detail::_S_max_rope_depth) - 1] - 1; 2098 // Guarantees that the result can be sufficiently 2099 // balanced. Longer ropes will probably still work, 2100 // but it's harder to make guarantees. 2101 } 2102 2103 typedef std::reverse_iterator
<const_iterator> const_reverse_iterator; 2104 2105 const_reverse_iterator 2106 rbegin() const 2107 { return const_reverse_iterator(end()); } 2108 2109 const_reverse_iterator 2110 const_rbegin() const 2111 { return const_reverse_iterator(end()); } 2112 2113 const_reverse_iterator 2114 rend() const 2115 { return const_reverse_iterator(begin()); } 2116 2117 const_reverse_iterator 2118 const_rend() const 2119 { return const_reverse_iterator(begin()); } 2120 2121 template<class _CharT2, class _Alloc2>
2122 friend rope<_CharT2, _Alloc2> 2123 operator+(const rope<_CharT2, _Alloc2>& __left, 2124 const rope<_CharT2, _Alloc2>& __right); 2125 2126 template
<class _CharT2, class _Alloc2> 2127 friend rope<_CharT2, _Alloc2> 2128 operator+(const rope<_CharT2, _Alloc2>& __left, const _CharT2* __right); 2129 2130 template<class _CharT2, class _Alloc2>
2131 friend rope<_CharT2, _Alloc2> 2132 operator+(const rope<_CharT2, _Alloc2>& __left, _CharT2 __right); 2133 2134 // The symmetric cases are intentionally omitted, since they're 2135 // presumed to be less common, and we don't handle them as well. 2136 2137 // The following should really be templatized. The first 2138 // argument should be an input iterator or forward iterator with 2139 // value_type _CharT. 2140 rope& 2141 append(const _CharT* __iter, size_type __n) 2142 { 2143 allocator_type __a = _M_get_allocator(); 2144 _RopeRep* __result = 2145 _S_destr_concat_char_iter(this->_M_tree_ptr, __iter, __n, __a); 2146 _S_unref(this->_M_tree_ptr); 2147 this->_M_tree_ptr = __result; 2148 return *this; 2149 } 2150 2151 rope& 2152 append(const _CharT* __c_string) 2153 { 2154 size_type __len = _S_char_ptr_len(__c_string); 2155 append(__c_string, __len); 2156 return(*this); 2157 } 2158 2159 rope& 2160 append(const _CharT* __s, const _CharT* __e) 2161 { 2162 allocator_type __a = _M_get_allocator(); 2163 _RopeRep* __result = 2164 _S_destr_concat_char_iter(this->_M_tree_ptr, __s, __e - __s, __a); 2165 _S_unref(this->_M_tree_ptr); 2166 this->_M_tree_ptr = __result; 2167 return *this; 2168 } 2169 2170 rope& 2171 append(const_iterator __s, const_iterator __e) 2172 { 2173 _Self_destruct_ptr __appendee(_S_substring(__s._M_root, 2174 __s._M_current_pos, 2175 __e._M_current_pos)); 2176 _RopeRep* __result = _S_concat(this->_M_tree_ptr, 2177 (_RopeRep*)__appendee); 2178 _S_unref(this->_M_tree_ptr); 2179 this->_M_tree_ptr = __result; 2180 return *this; 2181 } 2182 2183 rope& 2184 append(_CharT __c) 2185 { 2186 allocator_type __a = _M_get_allocator(); 2187 _RopeRep* __result = 2188 _S_destr_concat_char_iter(this->_M_tree_ptr, &__c, 1, __a); 2189 _S_unref(this->_M_tree_ptr); 2190 this->_M_tree_ptr = __result; 2191 return *this; 2192 } 2193 2194 rope& 2195 append() 2196 { return append(_CharT()); } // XXX why? 2197 2198 rope& 2199 append(const rope& __y) 2200 { 2201 _RopeRep* __result = _S_concat(this->_M_tree_ptr, __y._M_tree_ptr); 2202 _S_unref(this->_M_tree_ptr); 2203 this->_M_tree_ptr = __result; 2204 return *this; 2205 } 2206 2207 rope& 2208 append(size_type __n, _CharT __c) 2209 { 2210 rope<_CharT,_Alloc> __last(__n, __c); 2211 return append(__last); 2212 } 2213 2214 void 2215 swap(rope& __b) 2216 { 2217 _RopeRep* __tmp = this->_M_tree_ptr; 2218 this->_M_tree_ptr = __b._M_tree_ptr; 2219 __b._M_tree_ptr = __tmp; 2220 } 2221 2222 protected: 2223 // Result is included in refcount. 
2224 static _RopeRep* 2225 replace(_RopeRep* __old, size_type __pos1, 2226 size_type __pos2, _RopeRep* __r) 2227 { 2228 if (0 == __old) 2229 { 2230 _S_ref(__r); 2231 return __r; 2232 } 2233 _Self_destruct_ptr __left(_S_substring(__old, 0, __pos1)); 2234 _Self_destruct_ptr __right(_S_substring(__old, __pos2, __old->_M_size)); 2235 _RopeRep* __result; 2236 2237 if (0 == __r) 2238 __result = _S_concat(__left, __right); 2239 else 2240 { 2241 _Self_destruct_ptr __left_result(_S_concat(__left, __r)); 2242 __result = _S_concat(__left_result, __right); 2243 } 2244 return __result; 2245 } 2246 2247 public: 2248 void 2249 insert(size_type __p, const rope& __r) 2250 { 2251 _RopeRep* __result = 2252 replace(this->_M_tree_ptr, __p, __p, __r._M_tree_ptr); 2253 _S_unref(this->_M_tree_ptr); 2254 this->_M_tree_ptr = __result; 2255 } 2256 2257 void 2258 insert(size_type __p, size_type __n, _CharT __c) 2259 { 2260 rope<_CharT,_Alloc> __r(__n,__c); 2261 insert(__p, __r); 2262 } 2263 2264 void 2265 insert(size_type __p, const _CharT* __i, size_type __n) 2266 { 2267 _Self_destruct_ptr __left(_S_substring(this->_M_tree_ptr, 0, __p)); 2268 _Self_destruct_ptr __right(_S_substring(this->_M_tree_ptr, 2269 __p, size())); 2270 _Self_destruct_ptr __left_result(_S_concat_char_iter(__left, __i, __n, 2271 _M_get_allocator())); 2272 // _S_ destr_concat_char_iter should be safe here. 2273 // But as it stands it's probably not a win, since __left 2274 // is likely to have additional references. 2275 _RopeRep* __result = _S_concat(__left_result, __right); 2276 _S_unref(this->_M_tree_ptr); 2277 this->_M_tree_ptr = __result; 2278 } 2279 2280 void 2281 insert(size_type __p, const _CharT* __c_string) 2282 { insert(__p, __c_string, _S_char_ptr_len(__c_string)); } 2283 2284 void 2285 insert(size_type __p, _CharT __c) 2286 { insert(__p, &__c, 1); } 2287 2288 void 2289 insert(size_type __p) 2290 { 2291 _CharT __c = _CharT(); 2292 insert(__p, &__c, 1); 2293 } 2294 2295 void 2296 insert(size_type __p, const _CharT* __i, const _CharT* __j) 2297 { 2298 rope __r(__i, __j); 2299 insert(__p, __r); 2300 } 2301 2302 void 2303 insert(size_type __p, const const_iterator& __i, 2304 const const_iterator& __j) 2305 { 2306 rope __r(__i, __j); 2307 insert(__p, __r); 2308 } 2309 2310 void 2311 insert(size_type __p, const iterator& __i, 2312 const iterator& __j) 2313 { 2314 rope __r(__i, __j); 2315 insert(__p, __r); 2316 } 2317 2318 // (position, length) versions of replace operations: 2319 2320 void 2321 replace(size_type __p, size_type __n, const rope& __r) 2322 { 2323 _RopeRep* __result = 2324 replace(this->_M_tree_ptr, __p, __p + __n, __r._M_tree_ptr); 2325 _S_unref(this->_M_tree_ptr); 2326 this->_M_tree_ptr = __result; 2327 } 2328 2329 void 2330 replace(size_type __p, size_type __n, 2331 const _CharT* __i, size_type __i_len) 2332 { 2333 rope __r(__i, __i_len); 2334 replace(__p, __n, __r); 2335 } 2336 2337 void 2338 replace(size_type __p, size_type __n, _CharT __c) 2339 { 2340 rope __r(__c); 2341 replace(__p, __n, __r); 2342 } 2343 2344 void 2345 replace(size_type __p, size_type __n, const _CharT* __c_string) 2346 { 2347 rope __r(__c_string); 2348 replace(__p, __n, __r); 2349 } 2350 2351 void 2352 replace(size_type __p, size_type __n, 2353 const _CharT* __i, const _CharT* __j) 2354 { 2355 rope __r(__i, __j); 2356 replace(__p, __n, __r); 2357 } 2358 2359 void 2360 replace(size_type __p, size_type __n, 2361 const const_iterator& __i, const const_iterator& __j) 2362 { 2363 rope __r(__i, __j); 2364 replace(__p, __n, __r); 2365 } 2366 2367 void 2368 
replace(size_type __p, size_type __n, 2369 const iterator& __i, const iterator& __j) 2370 { 2371 rope __r(__i, __j); 2372 replace(__p, __n, __r); 2373 } 2374 2375 // Single character variants: 2376 void 2377 replace(size_type __p, _CharT __c) 2378 { 2379 iterator __i(this, __p); 2380 *__i = __c; 2381 } 2382 2383 void 2384 replace(size_type __p, const rope& __r) 2385 { replace(__p, 1, __r); } 2386 2387 void 2388 replace(size_type __p, const _CharT* __i, size_type __i_len) 2389 { replace(__p, 1, __i, __i_len); } 2390 2391 void 2392 replace(size_type __p, const _CharT* __c_string) 2393 { replace(__p, 1, __c_string); } 2394 2395 void 2396 replace(size_type __p, const _CharT* __i, const _CharT* __j) 2397 { replace(__p, 1, __i, __j); } 2398 2399 void 2400 replace(size_type __p, const const_iterator& __i, 2401 const const_iterator& __j) 2402 { replace(__p, 1, __i, __j); } 2403 2404 void 2405 replace(size_type __p, const iterator& __i, 2406 const iterator& __j) 2407 { replace(__p, 1, __i, __j); } 2408 2409 // Erase, (position, size) variant. 2410 void 2411 erase(size_type __p, size_type __n) 2412 { 2413 _RopeRep* __result = replace(this->_M_tree_ptr, __p, 2414 __p + __n, 0); 2415 _S_unref(this->_M_tree_ptr); 2416 this->_M_tree_ptr = __result; 2417 } 2418 2419 // Insert, iterator variants. 2420 iterator 2421 insert(const iterator& __p, const rope& __r) 2422 { 2423 insert(__p.index(), __r); 2424 return __p; 2425 } 2426 2427 iterator 2428 insert(const iterator& __p, size_type __n, _CharT __c) 2429 { 2430 insert(__p.index(), __n, __c); 2431 return __p; 2432 } 2433 2434 iterator insert(const iterator& __p, _CharT __c) 2435 { 2436 insert(__p.index(), __c); 2437 return __p; 2438 } 2439 2440 iterator 2441 insert(const iterator& __p ) 2442 { 2443 insert(__p.index()); 2444 return __p; 2445 } 2446 2447 iterator 2448 insert(const iterator& __p, const _CharT* c_string) 2449 { 2450 insert(__p.index(), c_string); 2451 return __p; 2452 } 2453 2454 iterator 2455 insert(const iterator& __p, const _CharT* __i, size_type __n) 2456 { 2457 insert(__p.index(), __i, __n); 2458 return __p; 2459 } 2460 2461 iterator 2462 insert(const iterator& __p, const _CharT* __i, 2463 const _CharT* __j) 2464 { 2465 insert(__p.index(), __i, __j); 2466 return __p; 2467 } 2468 2469 iterator 2470 insert(const iterator& __p, 2471 const const_iterator& __i, const const_iterator& __j) 2472 { 2473 insert(__p.index(), __i, __j); 2474 return __p; 2475 } 2476 2477 iterator 2478 insert(const iterator& __p, 2479 const iterator& __i, const iterator& __j) 2480 { 2481 insert(__p.index(), __i, __j); 2482 return __p; 2483 } 2484 2485 // Replace, range variants. 
2486 void 2487 replace(const iterator& __p, const iterator& __q, const rope& __r) 2488 { replace(__p.index(), __q.index() - __p.index(), __r); } 2489 2490 void 2491 replace(const iterator& __p, const iterator& __q, _CharT __c) 2492 { replace(__p.index(), __q.index() - __p.index(), __c); } 2493 2494 void 2495 replace(const iterator& __p, const iterator& __q, 2496 const _CharT* __c_string) 2497 { replace(__p.index(), __q.index() - __p.index(), __c_string); } 2498 2499 void 2500 replace(const iterator& __p, const iterator& __q, 2501 const _CharT* __i, size_type __n) 2502 { replace(__p.index(), __q.index() - __p.index(), __i, __n); } 2503 2504 void 2505 replace(const iterator& __p, const iterator& __q, 2506 const _CharT* __i, const _CharT* __j) 2507 { replace(__p.index(), __q.index() - __p.index(), __i, __j); } 2508 2509 void 2510 replace(const iterator& __p, const iterator& __q, 2511 const const_iterator& __i, const const_iterator& __j) 2512 { replace(__p.index(), __q.index() - __p.index(), __i, __j); } 2513 2514 void 2515 replace(const iterator& __p, const iterator& __q, 2516 const iterator& __i, const iterator& __j) 2517 { replace(__p.index(), __q.index() - __p.index(), __i, __j); } 2518 2519 // Replace, iterator variants. 2520 void 2521 replace(const iterator& __p, const rope& __r) 2522 { replace(__p.index(), __r); } 2523 2524 void 2525 replace(const iterator& __p, _CharT __c) 2526 { replace(__p.index(), __c); } 2527 2528 void 2529 replace(const iterator& __p, const _CharT* __c_string) 2530 { replace(__p.index(), __c_string); } 2531 2532 void 2533 replace(const iterator& __p, const _CharT* __i, size_type __n) 2534 { replace(__p.index(), __i, __n); } 2535 2536 void 2537 replace(const iterator& __p, const _CharT* __i, const _CharT* __j) 2538 { replace(__p.index(), __i, __j); } 2539 2540 void 2541 replace(const iterator& __p, const_iterator __i, const_iterator __j) 2542 { replace(__p.index(), __i, __j); } 2543 2544 void 2545 replace(const iterator& __p, iterator __i, iterator __j) 2546 { replace(__p.index(), __i, __j); } 2547 2548 // Iterator and range variants of erase 2549 iterator 2550 erase(const iterator& __p, const iterator& __q) 2551 { 2552 size_type __p_index = __p.index(); 2553 erase(__p_index, __q.index() - __p_index); 2554 return iterator(this, __p_index); 2555 } 2556 2557 iterator 2558 erase(const iterator& __p) 2559 { 2560 size_type __p_index = __p.index(); 2561 erase(__p_index, 1); 2562 return iterator(this, __p_index); 2563 } 2564 2565 rope 2566 substr(size_type __start, size_type __len = 1) const 2567 { 2568 return rope<_CharT, _Alloc>(_S_substring(this->_M_tree_ptr, 2569 __start, 2570 __start + __len)); 2571 } 2572 2573 rope 2574 substr(iterator __start, iterator __end) const 2575 { 2576 return rope<_CharT, _Alloc>(_S_substring(this->_M_tree_ptr, 2577 __start.index(), 2578 __end.index())); 2579 } 2580 2581 rope 2582 substr(iterator __start) const 2583 { 2584 size_type __pos = __start.index(); 2585 return rope<_CharT, _Alloc>(_S_substring(this->_M_tree_ptr, 2586 __pos, __pos + 1)); 2587 } 2588 2589 rope 2590 substr(const_iterator __start, const_iterator __end) const 2591 { 2592 // This might eventually take advantage of the cache in the 2593 // iterator. 
2594 return rope<_CharT, _Alloc>(_S_substring(this->_M_tree_ptr, 2595 __start.index(), 2596 __end.index())); 2597 } 2598 2599 rope<_CharT, _Alloc> 2600 substr(const_iterator __start) 2601 { 2602 size_type __pos = __start.index(); 2603 return rope<_CharT, _Alloc>(_S_substring(this->_M_tree_ptr, 2604 __pos, __pos + 1)); 2605 } 2606 2607 static const size_type npos; 2608 2609 size_type find(_CharT __c, size_type __pos = 0) const; 2610 2611 size_type 2612 find(const _CharT* __s, size_type __pos = 0) const 2613 { 2614 size_type __result_pos; 2615 const_iterator __result = 2616 std::search(const_begin() + __pos, const_end(), 2617 __s, __s + _S_char_ptr_len(__s)); 2618 __result_pos = __result.index(); 2619 #ifndef __STL_OLD_ROPE_SEMANTICS 2620 if (__result_pos == size()) 2621 __result_pos = npos; 2622 #endif 2623 return __result_pos; 2624 } 2625 2626 iterator 2627 mutable_begin() 2628 { return(iterator(this, 0)); } 2629 2630 iterator 2631 mutable_end() 2632 { return(iterator(this, size())); } 2633 2634 typedef std::reverse_iterator<iterator>
reverse_iterator; 2635 2636 reverse_iterator 2637 mutable_rbegin() 2638 { return reverse_iterator(mutable_end()); } 2639 2640 reverse_iterator 2641 mutable_rend() 2642 { return reverse_iterator(mutable_begin()); } 2643 2644 reference 2645 mutable_reference_at(size_type __pos) 2646 { return reference(this, __pos); } 2647 2648 #ifdef __STD_STUFF 2649 reference 2650 operator[] (size_type __pos) 2651 { return _char_ref_proxy(this, __pos); } 2652 2653 reference 2654 at(size_type __pos) 2655 { 2656 // if (__pos >= size()) throw out_of_range; // XXX 2657 return (*this)[__pos]; 2658 } 2659 2660 void resize(size_type __n, _CharT __c) { } 2661 void resize(size_type __n) { } 2662 void reserve(size_type __res_arg = 0) { } 2663 2664 size_type 2665 capacity() const 2666 { return max_size(); } 2667 2668 // Stuff below this line is dangerous because it's error prone. 2669 // I would really like to get rid of it. 2670 // copy function with funny arg ordering. 2671 size_type 2672 copy(_CharT* __buffer, size_type __n, 2673 size_type __pos = 0) const 2674 { return copy(__pos, __n, __buffer); } 2675 2676 iterator 2677 end() 2678 { return mutable_end(); } 2679 2680 iterator 2681 begin() 2682 { return mutable_begin(); } 2683 2684 reverse_iterator 2685 rend() 2686 { return mutable_rend(); } 2687 2688 reverse_iterator 2689 rbegin() 2690 { return mutable_rbegin(); } 2691 2692 #else 2693 const_iterator 2694 end() 2695 { return const_end(); } 2696 2697 const_iterator 2698 begin() 2699 { return const_begin(); } 2700 2701 const_reverse_iterator 2702 rend() 2703 { return const_rend(); } 2704 2705 const_reverse_iterator 2706 rbegin() 2707 { return const_rbegin(); } 2708 2709 #endif 2710 }; 2711 2712 template
<class _CharT, class _Alloc> 2713 const typename rope<_CharT, _Alloc>::size_type 2714 rope<_CharT, _Alloc>::npos = (size_type)(-1); 2715 2716 template
<class _CharT, class _Alloc> 2717 inline bool operator==(const _Rope_const_iterator<_CharT, _Alloc>& __x, 2718 const _Rope_const_iterator<_CharT, _Alloc>& __y) 2719 { return (__x._M_current_pos == __y._M_current_pos 2720 && __x._M_root == __y._M_root); } 2721 2722 template
<class _CharT, class _Alloc> 2723 inline bool operator<(const _Rope_const_iterator<_CharT, _Alloc>& __x, 2724 const _Rope_const_iterator<_CharT, _Alloc>& __y) 2725 { return (__x._M_current_pos < __y._M_current_pos); } 2726 2727 template
<class _CharT, class _Alloc> 2728 inline bool operator!=(const _Rope_const_iterator<_CharT, _Alloc>& __x, 2729 const _Rope_const_iterator<_CharT, _Alloc>& __y) 2730 { return !(__x == __y); } 2731 2732 template
<class _CharT, class _Alloc> 2733 inline bool operator>(const _Rope_const_iterator<_CharT, _Alloc>& __x, 2734 const _Rope_const_iterator<_CharT, _Alloc>& __y) 2735 { return __y < __x; } 2736 2737 template
<class _CharT, class _Alloc> 2738 inline bool 2739 operator<=(const _Rope_const_iterator<_CharT, _Alloc>& __x, 2740 const _Rope_const_iterator<_CharT, _Alloc>& __y) 2741 { return !(__y < __x); } 2742 2743 template
<class _CharT, class _Alloc> 2744 inline bool 2745 operator>=(const _Rope_const_iterator<_CharT, _Alloc>& __x, 2746 const _Rope_const_iterator<_CharT, _Alloc>& __y) 2747 { return !(__x < __y); } 2748 2749 template
<class _CharT, class _Alloc> 2750 inline std::ptrdiff_t 2751 operator-(const _Rope_const_iterator<_CharT, _Alloc>& __x, 2752 const _Rope_const_iterator<_CharT, _Alloc>& __y) 2753 { 2754 return (std::ptrdiff_t)__x._M_current_pos 2755 - (std::ptrdiff_t)__y._M_current_pos; 2756 } 2757 2758 template
<class _CharT, class _Alloc> 2759 inline _Rope_const_iterator<_CharT, _Alloc> 2760 operator-(const _Rope_const_iterator<_CharT, _Alloc>& __x, 2761 std::ptrdiff_t __n) 2762 { return _Rope_const_iterator<_CharT, _Alloc>(__x._M_root, 2763 __x._M_current_pos - __n); } 2764 2765 template
<class _CharT, class _Alloc> 2766 inline _Rope_const_iterator<_CharT, _Alloc> 2767 operator+(const _Rope_const_iterator<_CharT, _Alloc>& __x, 2768 std::ptrdiff_t __n) 2769 { return _Rope_const_iterator<_CharT, _Alloc>(__x._M_root, 2770 __x._M_current_pos + __n); } 2771 2772 template
<class _CharT, class _Alloc> 2773 inline _Rope_const_iterator<_CharT, _Alloc> 2774 operator+(std::ptrdiff_t __n, 2775 const _Rope_const_iterator<_CharT, _Alloc>& __x) 2776 { return _Rope_const_iterator<_CharT, _Alloc>(__x._M_root, 2777 __x._M_current_pos + __n); } 2778 2779 template
<class _CharT, class _Alloc> 2780 inline bool 2781 operator==(const _Rope_iterator<_CharT, _Alloc>& __x, 2782 const _Rope_iterator<_CharT, _Alloc>& __y) 2783 {return (__x._M_current_pos == __y._M_current_pos 2784 && __x._M_root_rope == __y._M_root_rope); } 2785 2786 template
<class _CharT, class _Alloc> 2787 inline bool 2788 operator<(const _Rope_iterator<_CharT, _Alloc>& __x, 2789 const _Rope_iterator<_CharT, _Alloc>& __y) 2790 { return (__x._M_current_pos < __y._M_current_pos); } 2791 2792 template
<class _CharT, class _Alloc> 2793 inline bool 2794 operator!=(const _Rope_iterator<_CharT, _Alloc>& __x, 2795 const _Rope_iterator<_CharT, _Alloc>& __y) 2796 { return !(__x == __y); } 2797 2798 template
<class _CharT, class _Alloc> 2799 inline bool 2800 operator>(const _Rope_iterator<_CharT, _Alloc>& __x, 2801 const _Rope_iterator<_CharT, _Alloc>& __y) 2802 { return __y < __x; } 2803 2804 template
<class _CharT, class _Alloc> 2805 inline bool 2806 operator<=(const _Rope_iterator<_CharT, _Alloc>& __x, 2807 const _Rope_iterator<_CharT, _Alloc>& __y) 2808 { return !(__y < __x); } 2809 2810 template
<class _CharT, class _Alloc> 2811 inline bool 2812 operator>=(const _Rope_iterator<_CharT, _Alloc>& __x, 2813 const _Rope_iterator<_CharT, _Alloc>& __y) 2814 { return !(__x < __y); } 2815 2816 template
<class _CharT, class _Alloc> 2817 inline std::ptrdiff_t 2818 operator-(const _Rope_iterator<_CharT, _Alloc>& __x, 2819 const _Rope_iterator<_CharT, _Alloc>& __y) 2820 { return ((std::ptrdiff_t)__x._M_current_pos 2821 - (std::ptrdiff_t)__y._M_current_pos); } 2822 2823 template
<class _CharT, class _Alloc> 2824 inline _Rope_iterator<_CharT, _Alloc> 2825 operator-(const _Rope_iterator<_CharT, _Alloc>& __x, 2826 std::ptrdiff_t __n) 2827 { return _Rope_iterator<_CharT, _Alloc>(__x._M_root_rope, 2828 __x._M_current_pos - __n); } 2829 2830 template
<class _CharT, class _Alloc> 2831 inline _Rope_iterator<_CharT, _Alloc> 2832 operator+(const _Rope_iterator<_CharT, _Alloc>& __x, std::ptrdiff_t __n) 2833 { return _Rope_iterator<_CharT, _Alloc>(__x._M_root_rope, 2834 __x._M_current_pos + __n); } 2835 2836 template
<class _CharT, class _Alloc> 2837 inline _Rope_iterator<_CharT, _Alloc> 2838 operator+(std::ptrdiff_t __n, const _Rope_iterator<_CharT, _Alloc>& __x) 2839 { return _Rope_iterator<_CharT, _Alloc>(__x._M_root_rope, 2840 __x._M_current_pos + __n); } 2841 2842 template
<class _CharT, class _Alloc> 2843 inline rope<_CharT, _Alloc> 2844 operator+(const rope<_CharT, _Alloc>& __left, 2845 const rope<_CharT, _Alloc>& __right) 2846 { 2847 // Inlining this should make it possible to keep __left and 2848 // __right in registers. 2849 typedef rope<_CharT, _Alloc> rope_type; 2850 return rope_type(rope_type::_S_concat(__left._M_tree_ptr, 2851 __right._M_tree_ptr)); 2852 } 2853 2854 template
<class _CharT, class _Alloc> 2855 inline rope<_CharT, _Alloc>& 2856 operator+=(rope<_CharT, _Alloc>& __left, 2857 const rope<_CharT, _Alloc>& __right) 2858 { 2859 __left.append(__right); 2860 return __left; 2861 } 2862 2863 template
<class _CharT, class _Alloc> 2864 inline rope<_CharT, _Alloc> 2865 operator+(const rope<_CharT, _Alloc>& __left, 2866 const _CharT* __right) 2867 { 2868 typedef rope<_CharT, _Alloc> rope_type; 2869 std::size_t __rlen = rope_type::_S_char_ptr_len(__right); 2870 _Alloc __a = __left.get_allocator(); 2871 return rope_type(rope_type::_S_concat_char_iter(__left._M_tree_ptr, 2872 __right, __rlen, __a)); 2873 } 2874 2875 template
<class _CharT, class _Alloc> 2876 inline rope<_CharT, _Alloc>& 2877 operator+=(rope<_CharT, _Alloc>& __left, 2878 const _CharT* __right) 2879 { 2880 __left.append(__right); 2881 return __left; 2882 } 2883 2884 template
<class _CharT, class _Alloc> 2885 inline rope<_CharT, _Alloc> 2886 operator+(const rope<_CharT, _Alloc>& __left, _CharT __right) 2887 { 2888 typedef rope<_CharT, _Alloc> rope_type; 2889 _Alloc __a = __left.get_allocator(); 2890 return rope_type(rope_type::_S_concat_char_iter(__left._M_tree_ptr, 2891 &__right, 1, __a)); 2892 } 2893 2894 template
<class _CharT, class _Alloc> 2895 inline rope<_CharT, _Alloc>& 2896 operator+=(rope<_CharT, _Alloc>& __left, _CharT __right) 2897 { 2898 __left.append(__right); 2899 return __left; 2900 } 2901 2902 template
<class _CharT, class _Alloc> 2903 bool 2904 operator<(const rope<_CharT, _Alloc>& __left, 2905 const rope<_CharT, _Alloc>& __right) 2906 { return __left.compare(__right) < 0; } 2907 2908 template
<class _CharT, class _Alloc> 2909 bool 2910 operator==(const rope<_CharT, _Alloc>& __left, 2911 const rope<_CharT, _Alloc>& __right) 2912 { return __left.compare(__right) == 0; } 2913 2914 template
<class _CharT, class _Alloc> 2915 inline bool 2916 operator==(const _Rope_char_ptr_proxy<_CharT, _Alloc>& __x, 2917 const _Rope_char_ptr_proxy<_CharT, _Alloc>& __y) 2918 { return (__x._M_pos == __y._M_pos && __x._M_root == __y._M_root); } 2919 2920 template
<class _CharT, class _Alloc> 2921 inline bool 2922 operator!=(const rope<_CharT, _Alloc>& __x, 2923 const rope<_CharT, _Alloc>& __y) 2924 { return !(__x == __y); } 2925 2926 template
<class _CharT, class _Alloc> 2927 inline bool 2928 operator>(const rope<_CharT, _Alloc>& __x, 2929 const rope<_CharT, _Alloc>& __y) 2930 { return __y < __x; } 2931 2932 template
<class _CharT, class _Alloc> 2933 inline bool 2934 operator<=(const rope<_CharT, _Alloc>& __x, 2935 const rope<_CharT, _Alloc>& __y) 2936 { return !(__y < __x); } 2937 2938 template
<class _CharT, class _Alloc> 2939 inline bool 2940 operator>=(const rope<_CharT, _Alloc>& __x, 2941 const rope<_CharT, _Alloc>& __y) 2942 { return !(__x < __y); } 2943 2944 template
<class _CharT, class _Alloc> 2945 inline bool 2946 operator!=(const _Rope_char_ptr_proxy<_CharT, _Alloc>& __x, 2947 const _Rope_char_ptr_proxy<_CharT, _Alloc>& __y) 2948 { return !(__x == __y); } 2949 2950 template
<class _CharT, class _Traits, class _Alloc> 2951 std::basic_ostream<_CharT, _Traits>& 2952 operator<<(std::basic_ostream<_CharT, _Traits>& __o, 2953 const rope<_CharT, _Alloc>& __r); 2954 2955 typedef rope
<char> crope; 2956 typedef rope
<wchar_t> wrope; 2957 2958 inline crope::reference 2959 __mutable_reference_at(crope& __c, std::size_t __i) 2960 { return __c.mutable_reference_at(__i); } 2961 2962 inline wrope::reference 2963 __mutable_reference_at(wrope& __c, std::size_t __i) 2964 { return __c.mutable_reference_at(__i); } 2965 2966 template<class _CharT, class _Alloc>
2967 inline void 2968 swap(rope<_CharT, _Alloc>& __x, rope<_CharT, _Alloc>& __y) 2969 { __x.swap(__y); } 2970 2971 _GLIBCXX_END_NAMESPACE_VERSION 2972 } // namespace 2973 2974 2975 namespace std _GLIBCXX_VISIBILITY(default) 2976 { 2977 _GLIBCXX_BEGIN_NAMESPACE_VERSION 2978 2979 namespace tr1 2980 { 2981 template<> 2982 struct hash<__gnu_cxx::crope> 2983 { 2984 size_t 2985 operator()(const __gnu_cxx::crope& __str) const 2986 { 2987 size_t __size = __str.size(); 2988 if (0 == __size) 2989 return 0; 2990 return 13 * __str[0] + 5 * __str[__size - 1] + __size; 2991 } 2992 }; 2993 2994 2995 template<> 2996 struct hash<__gnu_cxx::wrope> 2997 { 2998 size_t 2999 operator()(const __gnu_cxx::wrope& __str) const 3000 { 3001 size_t __size = __str.size(); 3002 if (0 == __size) 3003 return 0; 3004 return 13 * __str[0] + 5 * __str[__size - 1] + __size; 3005 } 3006 }; 3007 } // namespace tr1 3008 3009 _GLIBCXX_END_NAMESPACE_VERSION 3010 } // namespace std 3011 3012 # include
<ext/ropeimpl.h> 3013 3014 #endif
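
Because the header above is reproduced verbatim, a brief usage sketch may help put the interface in context. The program below is a minimal example, not part of the header itself: it uses only members declared in the listing (construction from a C string, append, push_back, insert, replace, erase, substr, size, c_str and the stream inserter), and it assumes a GCC/libstdc++ toolchain, since <ext/rope> is a GNU extension rather than part of standard C++.

// rope_demo.cc -- minimal sketch of the __gnu_cxx::crope interface.
// Build with something like: g++ rope_demo.cc -o rope_demo
#include <ext/rope>
#include <iostream>

int main()
{
  using __gnu_cxx::crope;               // crope is rope<char>

  crope r("The quick brown fox");       // rope(const _CharT*)
  r.append(" jumps");                   // append(const _CharT*)
  r.push_back('.');                     // single-character append
  r.insert(4, "very ");                 // insert(size_type, const _CharT*)

  crope head = r.substr(0, 3);          // "The" as a new rope
  r.replace(4, 5, "really ");           // (position, length) replace
  r.erase(4, 7);                        // remove the inserted word again

  std::cout << r << '\n';               // operator<< is defined in ropeimpl.h
  std::cout << "length: " << r.size() << '\n';

  // c_str() flattens the rope into a NUL-terminated buffer owned by the
  // rope; treat it as invalidated once the rope is modified or destroyed.
  const char* flat = r.c_str();
  std::cout << flat << '\n';
  return 0;
}

Ropes trade the O(1) indexing of std::string for cheap concatenation and substring: operator+ and substr above build new trees that share nodes with the originals instead of copying the character data.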