/*
 * Silicon Graphics Computer Systems, Inc.
 *
 * Permission to use, copy, modify, distribute and sell this software
 * and its documentation for any purpose is hereby granted without fee,
 * provided that the above copyright notice appear in all copies and
 * that both that copyright notice and this permission notice appear
 * in supporting documentation.  Silicon Graphics makes no
 * representations about the suitability of this software for any
 * purpose.  It is provided "as is" without express or implied warranty.
 */
/* NOTE: This is an internal header file, included by other STL headers.
 *   You should not attempt to use it directly.
 */
# include <stdio.h>      /* XXX should use <cstdio> */
# include <iostream.h>   /* XXX should use <iostream> */
#if defined(__sgi) && !defined(__GNUC__) && (_MIPS_SIM != _MIPS_SIM_ABI32)
#pragma set woff 1174
#endif
// Set buf_start, buf_end, and buf_ptr appropriately, filling tmp_buf
// if necessary.  Assumes _M_path_end[leaf_index] and leaf_pos are correct.
// Results in a valid buf_ptr if the iterator can be legitimately
// dereferenced.
template <class _CharT, class _Alloc>
void _Rope_iterator_base<_CharT,_Alloc>::_S_setbuf(
  _Rope_iterator_base<_CharT,_Alloc>& __x)
{
    const _RopeRep* __leaf = __x._M_path_end[__x._M_leaf_index];
    size_t __leaf_pos = __x._M_leaf_pos;
    size_t __pos = __x._M_current_pos;

    switch(__leaf->_M_tag) {
        case _RopeRep::_S_leaf:
            __x._M_buf_start =
              ((_Rope_RopeLeaf<_CharT,_Alloc>*)__leaf)->_M_data;
            __x._M_buf_ptr = __x._M_buf_start + (__pos - __leaf_pos);
            __x._M_buf_end = __x._M_buf_start + __leaf->_M_size;
            break;
        case _RopeRep::_S_function:
        case _RopeRep::_S_substringfn:
            {
                size_t __len = _S_iterator_buf_len;
                size_t __buf_start_pos = __leaf_pos;
                size_t __leaf_end = __leaf_pos + __leaf->_M_size;
                char_producer<_CharT>* __fn =
                    ((_Rope_RopeFunction<_CharT,_Alloc>*)__leaf)->_M_fn;

                if (__buf_start_pos + __len <= __pos) {
                    __buf_start_pos = __pos - __len/4;
                    if (__buf_start_pos + __len > __leaf_end) {
                        __buf_start_pos = __leaf_end - __len;
                    }
                }
                if (__buf_start_pos + __len > __leaf_end) {
                    __len = __leaf_end - __buf_start_pos;
                }
                (*__fn)(__buf_start_pos - __leaf_pos, __len, __x._M_tmp_buf);
                __x._M_buf_ptr = __x._M_tmp_buf + (__pos - __buf_start_pos);
                __x._M_buf_start = __x._M_tmp_buf;
                __x._M_buf_end = __x._M_tmp_buf + __len;
            }
            break;
    }
}
// Set path and buffer inside a rope iterator.  We assume that
// pos and root are already set.
template <class _CharT, class _Alloc>
void _Rope_iterator_base<_CharT,_Alloc>::_S_setcache
(_Rope_iterator_base<_CharT,_Alloc>& __x)
{
    const _RopeRep* __path[_RopeRep::_S_max_rope_depth+1];
    const _RopeRep* __curr_rope;
    int __curr_depth = -1;  /* index into path */
    size_t __curr_start_pos = 0;
    size_t __pos = __x._M_current_pos;
    unsigned char __dirns = 0; // Bit vector marking right turns in the path

    __stl_assert(__pos <= __x._M_root->_M_size);
    if (__pos >= __x._M_root->_M_size) {
        __x._M_buf_ptr = 0;
        return;
    }
    __curr_rope = __x._M_root;
    if (0 != __curr_rope->_M_c_string) {
        /* Treat the root as a leaf. */
        __x._M_buf_start = __curr_rope->_M_c_string;
        __x._M_buf_end = __curr_rope->_M_c_string + __curr_rope->_M_size;
        __x._M_buf_ptr = __curr_rope->_M_c_string + __pos;
        __x._M_path_end[0] = __curr_rope;
        __x._M_leaf_index = 0;
        __x._M_leaf_pos = 0;
        return;
    }
    for(;;) {
        ++__curr_depth;
        __stl_assert(__curr_depth <= _RopeRep::_S_max_rope_depth);
        __path[__curr_depth] = __curr_rope;
        switch(__curr_rope->_M_tag) {
          case _RopeRep::_S_leaf:
          case _RopeRep::_S_function:
          case _RopeRep::_S_substringfn:
            __x._M_leaf_pos = __curr_start_pos;
            goto done;
          case _RopeRep::_S_concat:
            {
                _Rope_RopeConcatenation<_CharT,_Alloc>* __c =
                    (_Rope_RopeConcatenation<_CharT,_Alloc>*)__curr_rope;
                _RopeRep* __left = __c->_M_left;
                size_t __left_len = __left->_M_size;

                __dirns <<= 1;
                if (__pos >= __curr_start_pos + __left_len) {
                    __dirns |= 1;
                    __curr_rope = __c->_M_right;
                    __curr_start_pos += __left_len;
                } else {
                    __curr_rope = __left;
                }
            }
            break;
        }
    }
  done:
    // Copy last section of path into _M_path_end.
    {
        int __i = -1;
        int __j = __curr_depth + 1 - _S_path_cache_len;

        if (__j < 0) __j = 0;
        while (__j <= __curr_depth) {
            __x._M_path_end[++__i] = __path[__j++];
        }
        __x._M_leaf_index = __i;
    }
    __x._M_path_directions = __dirns;
    _S_setbuf(__x);
}
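
// Note (added; not in the original SGI sources): after _S_setcache the
// iterator keeps only the last _S_path_cache_len nodes of the root-to-leaf
// path in _M_path_end, with _M_leaf_index naming the leaf entry and
// _M_path_directions recording the recent right turns as a bit vector.
// _S_setcache_for_incr below relies on exactly this cached state.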
// Specialized version of the above.  Assumes that
// the path cache is valid for the previous position.
template <class _CharT, class _Alloc>
void _Rope_iterator_base<_CharT,_Alloc>::_S_setcache_for_incr
(_Rope_iterator_base<_CharT,_Alloc>& __x)
{
    int __current_index = __x._M_leaf_index;
    const _RopeRep* __current_node = __x._M_path_end[__current_index];
    size_t __len = __current_node->_M_size;
    size_t __node_start_pos = __x._M_leaf_pos;
    unsigned char __dirns = __x._M_path_directions;
    _Rope_RopeConcatenation<_CharT,_Alloc>* __c;

    __stl_assert(__x._M_current_pos <= __x._M_root->_M_size);
    if (__x._M_current_pos - __node_start_pos < __len) {
        /* More stuff in this leaf, we just didn't cache it. */
        _S_setbuf(__x);
        return;
    }
    __stl_assert(__node_start_pos + __len == __x._M_current_pos);
    // node_start_pos is starting position of last_node.
    while (--__current_index >= 0) {
        if (!(__dirns & 1) /* Path turned left */)
          break;
        __current_node = __x._M_path_end[__current_index];
        __c = (_Rope_RopeConcatenation<_CharT,_Alloc>*)__current_node;
        // Otherwise we were in the right child.  Thus we should pop
        // the concatenation node.
        __node_start_pos -= __c->_M_left->_M_size;
        __dirns >>= 1;
    }
    if (__current_index < 0) {
        // We underflowed the cache.  Punt.
        _S_setcache(__x);
        return;
    }
    __current_node = __x._M_path_end[__current_index];
    __c = (_Rope_RopeConcatenation<_CharT,_Alloc>*)__current_node;
    // current_node is a concatenation node.  We are positioned on the first
    // character in its right child.
    // node_start_pos is starting position of current_node.
    __node_start_pos += __c->_M_left->_M_size;
    __current_node = __c->_M_right;
    __x._M_path_end[++__current_index] = __current_node;
    __dirns |= 1;
    while (_RopeRep::_S_concat == __current_node->_M_tag) {
        ++__current_index;
        if (_S_path_cache_len == __current_index) {
            int __i;
            for (__i = 0; __i < _S_path_cache_len-1; __i++) {
                __x._M_path_end[__i] = __x._M_path_end[__i+1];
            }
            --__current_index;
        }
        __current_node =
            ((_Rope_RopeConcatenation<_CharT,_Alloc>*)__current_node)->_M_left;
        __x._M_path_end[__current_index] = __current_node;
        __dirns <<= 1;
        // node_start_pos is unchanged.
    }
    __x._M_leaf_index = __current_index;
    __x._M_leaf_pos = __node_start_pos;
    __x._M_path_directions = __dirns;
    _S_setbuf(__x);
}
template <class _CharT, class _Alloc>
void _Rope_iterator_base<_CharT,_Alloc>::_M_incr(size_t __n) {
    _M_current_pos += __n;
    if (0 != _M_buf_ptr) {
        size_t __chars_left = _M_buf_end - _M_buf_ptr;
        if (__chars_left > __n) {
            _M_buf_ptr += __n;
        } else if (__chars_left == __n) {
            _M_buf_ptr += __n;
            _S_setcache_for_incr(*this);
        } else {
            _M_buf_ptr = 0;
        }
    }
}
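
// Note (added): an increment that stays inside the cached buffer only bumps
// _M_buf_ptr; landing exactly on _M_buf_end lets _S_setcache_for_incr advance
// the path cache cheaply, and overshooting clears _M_buf_ptr so the next
// dereference rebuilds the cache via _S_setcache.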
template <class _CharT, class _Alloc>
void _Rope_iterator_base<_CharT,_Alloc>::_M_decr(size_t __n) {
    if (0 != _M_buf_ptr) {
        size_t __chars_left = _M_buf_ptr - _M_buf_start;
        if (__chars_left >= __n) {
            _M_buf_ptr -= __n;
        } else {
            _M_buf_ptr = 0;
        }
    }
    _M_current_pos -= __n;
}
template <class _CharT, class _Alloc>
void _Rope_iterator<_CharT,_Alloc>::_M_check() {
    if (_M_root_rope->_M_tree_ptr != _M_root) {
        // _Rope was modified.  Get things fixed up.
        _RopeRep::_S_unref(_M_root);
        _M_root = _M_root_rope->_M_tree_ptr;
        _RopeRep::_S_ref(_M_root);
        _M_buf_ptr = 0;
    }
}
template <class _CharT, class _Alloc>
inline
_Rope_const_iterator<_CharT, _Alloc>::_Rope_const_iterator(
  const _Rope_iterator<_CharT,_Alloc>& __x)
: _Rope_iterator_base<_CharT,_Alloc>(__x)
{ }
template <class _CharT, class _Alloc>
inline _Rope_iterator<_CharT,_Alloc>::_Rope_iterator(
  rope<_CharT,_Alloc>& __r, size_t __pos)
: _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr, __pos),
  _M_root_rope(&__r)
{
    _RopeRep::_S_ref(_M_root);
}
template <class _CharT, class _Alloc>
inline size_t
rope<_CharT,_Alloc>::_S_char_ptr_len(const _CharT* __s)
{
    const _CharT* __p = __s;

    while (!_S_is0(*__p)) { ++__p; }
    return (__p - __s);
}
template <class _CharT, class _Alloc>
inline void _Rope_RopeRep<_CharT,_Alloc>::_M_free_c_string()
{
    _CharT* __cstr = _M_c_string;
    if (0 != __cstr) {
        size_t __size = _M_size + 1;
        destroy(__cstr, __cstr + __size);
        _Data_deallocate(__cstr, __size);
    }
}
template <class _CharT, class _Alloc>
#ifdef __STL_USE_STD_ALLOCATORS
  inline void _Rope_RopeRep<_CharT,_Alloc>::_S_free_string(_CharT* __s,
                                                           size_t __n,
                                                           allocator_type __a)
#else
  inline void _Rope_RopeRep<_CharT,_Alloc>::_S_free_string(_CharT* __s,
                                                           size_t __n)
#endif
{
    if (!_S_is_basic_char_type((_CharT*)0)) {
        destroy(__s, __s + __n);
    }
//  This has to be a static member, so this gets a bit messy
#   ifdef __STL_USE_STD_ALLOCATORS
        __a.deallocate(
            __s, _Rope_RopeLeaf<_CharT,_Alloc>::_S_rounded_up_size(__n));
#   else
        _Data_deallocate(
            __s, _Rope_RopeLeaf<_CharT,_Alloc>::_S_rounded_up_size(__n));
#   endif
}
//  There are several reasons for not doing this with virtual destructors
//  and a class specific delete operator:
//  - A class specific delete operator can't easily get access to
//    allocator instances if we need them.
//  - Any virtual function would need a 4 or 8 byte vtable pointer;
//    this only requires a one byte tag per object.
template <class _CharT, class _Alloc>
void _Rope_RopeRep<_CharT,_Alloc>::_M_free_tree()
{
    switch(_M_tag) {
        case _S_leaf:
            {
                _Rope_RopeLeaf<_CharT,_Alloc>* __l
                    = (_Rope_RopeLeaf<_CharT,_Alloc>*)this;
                __l->_Rope_RopeLeaf<_CharT,_Alloc>::~_Rope_RopeLeaf();
                _L_deallocate(__l, 1);
                break;
            }
        case _S_concat:
            {
                _Rope_RopeConcatenation<_CharT,_Alloc>* __c
                    = (_Rope_RopeConcatenation<_CharT,_Alloc>*)this;
                __c->_Rope_RopeConcatenation<_CharT,_Alloc>::
                      ~_Rope_RopeConcatenation();
                _C_deallocate(__c, 1);
                break;
            }
        case _S_function:
            {
                _Rope_RopeFunction<_CharT,_Alloc>* __f
                    = (_Rope_RopeFunction<_CharT,_Alloc>*)this;
                __f->_Rope_RopeFunction<_CharT,_Alloc>::~_Rope_RopeFunction();
                _F_deallocate(__f, 1);
                break;
            }
        case _S_substringfn:
            {
                _Rope_RopeSubstring<_CharT,_Alloc>* __ss =
                    (_Rope_RopeSubstring<_CharT,_Alloc>*)this;
                __ss->_Rope_RopeSubstring<_CharT,_Alloc>::
                      ~_Rope_RopeSubstring();
                _S_deallocate(__ss, 1);
                break;
            }
    }
}
template <class _CharT, class _Alloc>
#ifdef __STL_USE_STD_ALLOCATORS
  inline void _Rope_RopeRep<_CharT,_Alloc>::_S_free_string
                (const _CharT*, size_t, allocator_type)
#else
  inline void _Rope_RopeRep<_CharT,_Alloc>::_S_free_string
                (const _CharT*, size_t)
#endif
{}
// Concatenate a C string onto a leaf rope by copying the rope data.
// Used for short ropes.
template <class _CharT, class _Alloc>
rope<_CharT,_Alloc>::_RopeLeaf*
rope<_CharT,_Alloc>::_S_leaf_concat_char_iter
                (_RopeLeaf* __r, const _CharT* __iter, size_t __len)
{
    size_t __old_len = __r->_M_size;
    _CharT* __new_data = (_CharT*)
        _Data_allocate(_S_rounded_up_size(__old_len + __len));
    _RopeLeaf* __result;

    uninitialized_copy_n(__r->_M_data, __old_len, __new_data);
    uninitialized_copy_n(__iter, __len, __new_data + __old_len);
    _S_cond_store_eos(__new_data[__old_len + __len]);
    __STL_TRY {
        __result = _S_new_RopeLeaf(__new_data, __old_len + __len,
                                   __r->get_allocator());
    }
    __STL_UNWIND(_RopeRep::__STL_FREE_STRING(__new_data, __old_len + __len,
                                             __r->get_allocator()));
    return __result;
}
// As above, but it's OK to clobber original if refcount is 1
template <class _CharT, class _Alloc>
rope<_CharT,_Alloc>::_RopeLeaf*
rope<_CharT,_Alloc>::_S_destr_leaf_concat_char_iter
                (_RopeLeaf* __r, const _CharT* __iter, size_t __len)
{
    __stl_assert(__r->_M_refcount >= 1);
    if (__r->_M_refcount > 1)
      return _S_leaf_concat_char_iter(__r, __iter, __len);
    size_t __old_len = __r->_M_size;
    if (_S_allocated_capacity(__old_len) >= __old_len + __len) {
        // The space has been partially initialized for the standard
        // character types.  But that doesn't matter for those types.
        uninitialized_copy_n(__iter, __len, __r->_M_data + __old_len);
        if (_S_is_basic_char_type((_CharT*)0)) {
            _S_cond_store_eos(__r->_M_data[__old_len + __len]);
            __stl_assert(__r->_M_c_string == __r->_M_data);
        } else if (__r->_M_c_string != __r->_M_data && 0 != __r->_M_c_string) {
            __r->_M_free_c_string();
            __r->_M_c_string = 0;
        }
        __r->_M_size = __old_len + __len;
        __stl_assert(__r->_M_refcount == 1);
        __r->_M_refcount = 2;
        return __r;
    } else {
        _RopeLeaf* __result = _S_leaf_concat_char_iter(__r, __iter, __len);
        __stl_assert(__result->_M_refcount == 1);
        return __result;
    }
}
// Assumes left and right are not 0.
// Does not increment (nor decrement on exception) child reference counts.
// Result has ref count 1.
template <class _CharT, class _Alloc>
rope<_CharT,_Alloc>::_RopeRep*
rope<_CharT,_Alloc>::_S_tree_concat (_RopeRep* __left, _RopeRep* __right)
{
    _RopeConcatenation* __result =
      _S_new_RopeConcatenation(__left, __right, __left->get_allocator());
    size_t __depth = __result->_M_depth;

#   ifdef __STL_USE_STD_ALLOCATORS
      __stl_assert(__left->get_allocator() == __right->get_allocator());
#   endif
    if (__depth > 20 && (__result->_M_size < 1000 ||
                         __depth > _RopeRep::_S_max_rope_depth)) {
        _RopeRep* __balanced;

        __STL_TRY {
            __balanced = _S_balance(__result);
            if (__result != __balanced) {
                __stl_assert(1 == __result->_M_refcount
                             && 1 == __balanced->_M_refcount);
            }
            __result->_M_unref_nonnil();
        }
        __STL_UNWIND((_C_deallocate(__result,1)));
                // In case of exception, we need to deallocate
                // otherwise dangling result node.  But caller
                // still owns its children.  Thus unref is
                // not appropriate.
        return __balanced;
    }
    return __result;
}
template <class _CharT, class _Alloc>
rope<_CharT,_Alloc>::_RopeRep* rope<_CharT,_Alloc>::_S_concat_char_iter
                (_RopeRep* __r, const _CharT* __s, size_t __slen)
{
    _RopeRep* __result;
    if (0 == __slen) {
        _S_ref(__r);
        return __r;
    }
    if (0 == __r)
      return __STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __slen,
                                              __r->get_allocator());
    if (_RopeRep::_S_leaf == __r->_M_tag &&
          __r->_M_size + __slen <= _S_copy_max) {
        __result = _S_leaf_concat_char_iter((_RopeLeaf*)__r, __s, __slen);
        __stl_assert(1 == __result->_M_refcount);
        return __result;
    }
    if (_RopeRep::_S_concat == __r->_M_tag
        && _RopeRep::_S_leaf == ((_RopeConcatenation*)__r)->_M_right->_M_tag) {
        _RopeLeaf* __right =
          (_RopeLeaf* )(((_RopeConcatenation* )__r)->_M_right);
        if (__right->_M_size + __slen <= _S_copy_max) {
            _RopeRep* __left = ((_RopeConcatenation*)__r)->_M_left;
            _RopeRep* __nright =
              _S_leaf_concat_char_iter((_RopeLeaf*)__right, __s, __slen);
            __left->_M_ref_nonnil();
            __STL_TRY {
                __result = _S_tree_concat(__left, __nright);
            }
            __STL_UNWIND(_S_unref(__left); _S_unref(__nright));
            __stl_assert(1 == __result->_M_refcount);
            return __result;
        }
    }
    _RopeRep* __nright =
      __STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __slen, __r->get_allocator());
    __STL_TRY {
        __r->_M_ref_nonnil();
        __result = _S_tree_concat(__r, __nright);
    }
    __STL_UNWIND(_S_unref(__r); _S_unref(__nright));
    __stl_assert(1 == __result->_M_refcount);
    return __result;
}
template <class _CharT, class _Alloc>
rope<_CharT,_Alloc>::_RopeRep*
rope<_CharT,_Alloc>::_S_destr_concat_char_iter(
  _RopeRep* __r, const _CharT* __s, size_t __slen)
{
    _RopeRep* __result;
    if (0 == __r)
      return __STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __slen,
                                              __r->get_allocator());
    size_t __count = __r->_M_refcount;
    size_t __orig_size = __r->_M_size;
    __stl_assert(__count >= 1);
    if (__count > 1) return _S_concat_char_iter(__r, __s, __slen);
    if (0 == __slen) {
        __r->_M_refcount = 2;      // One more than before
        return __r;
    }
    if (__orig_size + __slen <= _S_copy_max &&
          _RopeRep::_S_leaf == __r->_M_tag) {
        __result = _S_destr_leaf_concat_char_iter((_RopeLeaf*)__r, __s, __slen);
        return __result;
    }
    if (_RopeRep::_S_concat == __r->_M_tag) {
        _RopeLeaf* __right = (_RopeLeaf*)(((_RopeConcatenation*)__r)->_M_right);
        if (_RopeRep::_S_leaf == __right->_M_tag
            && __right->_M_size + __slen <= _S_copy_max) {
            _RopeRep* __new_right =
              _S_destr_leaf_concat_char_iter(__right, __s, __slen);
            if (__right == __new_right) {
                __stl_assert(__new_right->_M_refcount == 2);
                __new_right->_M_refcount = 1;
            } else {
                __stl_assert(__new_right->_M_refcount >= 1);
                __right->_M_unref_nonnil();
            }
            __stl_assert(__r->_M_refcount == 1);
            __r->_M_refcount = 2;    // One more than before.
            ((_RopeConcatenation*)__r)->_M_right = __new_right;
            __r->_M_size = __orig_size + __slen;
            if (0 != __r->_M_c_string) {
                __r->_M_free_c_string();
                __r->_M_c_string = 0;
            }
            return __r;
        }
    }
    _RopeRep* __right =
      __STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __slen, __r->get_allocator());
    __r->_M_ref_nonnil();
    __STL_TRY {
        __result = _S_tree_concat(__r, __right);
    }
    __STL_UNWIND(_S_unref(__r); _S_unref(__right))
    __stl_assert(1 == __result->_M_refcount);
    return __result;
}
template <class _CharT, class _Alloc>
rope<_CharT,_Alloc>::_RopeRep*
rope<_CharT,_Alloc>::_S_concat(_RopeRep* __left, _RopeRep* __right)
{
    if (0 == __left) {
        _S_ref(__right);
        return __right;
    }
    if (0 == __right) {
        __left->_M_ref_nonnil();
        return __left;
    }
    if (_RopeRep::_S_leaf == __right->_M_tag) {
        if (_RopeRep::_S_leaf == __left->_M_tag) {
            if (__right->_M_size + __left->_M_size <= _S_copy_max) {
                return _S_leaf_concat_char_iter((_RopeLeaf*)__left,
                                                ((_RopeLeaf*)__right)->_M_data,
                                                __right->_M_size);
            }
        } else if (_RopeRep::_S_concat == __left->_M_tag
                   && _RopeRep::_S_leaf ==
                      ((_RopeConcatenation*)__left)->_M_right->_M_tag) {
            _RopeLeaf* __leftright =
              (_RopeLeaf*)(((_RopeConcatenation*)__left)->_M_right);
            if (__leftright->_M_size + __right->_M_size <= _S_copy_max) {
                _RopeRep* __leftleft = ((_RopeConcatenation*)__left)->_M_left;
                _RopeRep* __rest = _S_leaf_concat_char_iter(__leftright,
                                              ((_RopeLeaf*)__right)->_M_data,
                                              __right->_M_size);
                __leftleft->_M_ref_nonnil();
                __STL_TRY {
                    return(_S_tree_concat(__leftleft, __rest));
                }
                __STL_UNWIND(_S_unref(__leftleft); _S_unref(__rest))
            }
        }
    }
    __left->_M_ref_nonnil();
    __right->_M_ref_nonnil();
    __STL_TRY {
        return(_S_tree_concat(__left, __right));
    }
    __STL_UNWIND(_S_unref(__left); _S_unref(__right));
}
template <class _CharT, class _Alloc>
rope<_CharT,_Alloc>::_RopeRep*
rope<_CharT,_Alloc>::_S_substring(_RopeRep* __base,
                                  size_t __start, size_t __endp1)
{
    if (0 == __base) return 0;
    size_t __len = __base->_M_size;
    size_t __adj_endp1;
    const size_t __lazy_threshold = 128;

    if (__endp1 >= __len) {
        if (0 == __start) {
            __base->_M_ref_nonnil();
            return __base;
        } else {
            __adj_endp1 = __len;
        }
    } else {
        __adj_endp1 = __endp1;
    }
    switch(__base->_M_tag) {
        case _RopeRep::_S_concat:
            {
                _RopeConcatenation* __c = (_RopeConcatenation*)__base;
                _RopeRep* __left = __c->_M_left;
                _RopeRep* __right = __c->_M_right;
                size_t __left_len = __left->_M_size;
                _RopeRep* __result;

                if (__adj_endp1 <= __left_len) {
                    return _S_substring(__left, __start, __endp1);
                } else if (__start >= __left_len) {
                    return _S_substring(__right, __start - __left_len,
                                        __adj_endp1 - __left_len);
                }
                _Self_destruct_ptr __left_result(
                  _S_substring(__left, __start, __left_len));
                _Self_destruct_ptr __right_result(
                  _S_substring(__right, 0, __endp1 - __left_len));
                __result = _S_concat(__left_result, __right_result);
                __stl_assert(1 == __result->_M_refcount);
                return __result;
            }
        case _RopeRep::_S_leaf:
            {
                _RopeLeaf* __l = (_RopeLeaf*)__base;
                _RopeRep* __result;
                size_t __result_len;
                if (__start >= __adj_endp1) return 0;
                __result_len = __adj_endp1 - __start;
                if (__result_len > __lazy_threshold) goto lazy;
#               ifdef __GC
                    const _CharT* __section = __l->_M_data + __start;
                    __result = _S_new_RopeLeaf(__section, __result_len,
                                               __base->get_allocator());
                    __result->_M_c_string = 0; // Not eos terminated.
#               else
                    // We should sometimes create substring node instead.
                    __result = __STL_ROPE_FROM_UNOWNED_CHAR_PTR(
                                        __l->_M_data + __start, __result_len,
                                        __base->get_allocator());
#               endif
                return __result;
            }
        case _RopeRep::_S_substringfn:
            // Avoid introducing multiple layers of substring nodes.
            {
                _RopeSubstring* __old = (_RopeSubstring*)__base;
                size_t __result_len;
                if (__start >= __adj_endp1) return 0;
                __result_len = __adj_endp1 - __start;
                if (__result_len > __lazy_threshold) {
                    _RopeSubstring* __result =
                        _S_new_RopeSubstring(__old->_M_base,
                                             __start + __old->_M_start,
                                             __adj_endp1 - __start,
                                             __base->get_allocator());
                    return __result;
                } // *** else fall through: ***
            }
        case _RopeRep::_S_function:
            {
                _RopeFunction* __f = (_RopeFunction*)__base;
                _CharT* __section;
                size_t __result_len;
                if (__start >= __adj_endp1) return 0;
                __result_len = __adj_endp1 - __start;

                if (__result_len > __lazy_threshold) goto lazy;
                __section = (_CharT*)
                    _Data_allocate(_S_rounded_up_size(__result_len));
                __STL_TRY {
                    (*(__f->_M_fn))(__start, __result_len, __section);
                }
                __STL_UNWIND(_RopeRep::__STL_FREE_STRING(
                      __section, __result_len, __base->get_allocator()));
                _S_cond_store_eos(__section[__result_len]);
                return _S_new_RopeLeaf(__section, __result_len,
                                       __base->get_allocator());
            }
    }
  lazy:
    {
        // Create substring node.
        return _S_new_RopeSubstring(__base, __start, __adj_endp1 - __start,
                                    __base->get_allocator());
    }
}
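
// Note (added): pieces longer than __lazy_threshold (128) characters are not
// copied by _S_substring; the lazy: branch above returns a substring node
// that merely references __base, so taking a large substring is cheap and the
// characters are materialized only when they are actually read.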
template<class _CharT>
class _Rope_flatten_char_consumer : public _Rope_char_consumer<_CharT> {
    private:
        _CharT* _M_buf_ptr;
    public:
        //  _CharT* _M_buffer;  // XXX not used

        _Rope_flatten_char_consumer(_CharT* __buffer) {
            _M_buf_ptr = __buffer;
        };
        ~_Rope_flatten_char_consumer() {}
        bool operator() (const _CharT* __leaf, size_t __n) {
            uninitialized_copy_n(__leaf, __n, _M_buf_ptr);
            _M_buf_ptr += __n;
            return true;
        }
};
template<class _CharT>
class _Rope_find_char_char_consumer : public _Rope_char_consumer<_CharT> {
    private:
        _CharT _M_pattern;
    public:
        size_t _M_count;  // Number of nonmatching characters
        _Rope_find_char_char_consumer(_CharT __p)
          : _M_pattern(__p), _M_count(0) {}
        ~_Rope_find_char_char_consumer() {}
        bool operator() (const _CharT* __leaf, size_t __n) {
            size_t __i;
            for (__i = 0; __i < __n; __i++) {
                if (__leaf[__i] == _M_pattern) {
                    _M_count += __i; return false;
                }
            }
            _M_count += __n; return true;
        }
};
template<class _CharT>
class _Rope_insert_char_consumer : public _Rope_char_consumer<_CharT> {
    private:
        typedef ostream _Insert_ostream;
        _Insert_ostream& _M_o;
    public:
        // _CharT* buffer;    // XXX not used
        _Rope_insert_char_consumer(_Insert_ostream& __writer)
          : _M_o(__writer) {};
        ~_Rope_insert_char_consumer() { };
                // Caller is presumed to own the ostream
        bool operator() (const _CharT* __leaf, size_t __n);
                // Returns true to continue traversal.
};
template<class _CharT>
bool _Rope_insert_char_consumer<_CharT>::operator()
                (const _CharT* __leaf, size_t __n)
{
    size_t __i;
    //  We assume that formatting is set up correctly for each element.
    for (__i = 0; __i < __n; __i++) _M_o << __leaf[__i];
    return true;
}
inline bool _Rope_insert_char_consumer<char>::operator()
                (const char* __leaf, size_t __n)
{
    size_t __i;
    for (__i = 0; __i < __n; __i++) _M_o.put(__leaf[__i]);
    return true;
}
// I couldn't get this to work with the VC++ version of basic_ostream.
// It also doesn't really do the right thing unless o is a wide stream.
// Given that wchar_t is often 4 bytes, it's not clear to me how useful
// this stuff is anyway.
inline bool _Rope_insert_char_consumer<wchar_t>::operator()
                (const wchar_t* __leaf, size_t __n)
{
    size_t __i;
    for (__i = 0; __i < __n; __i++) _M_o.put(__leaf[__i]);
    return true;
}
#endif /* !_MSC_VER && !BORLAND */
template <class _CharT, class _Alloc>
bool rope<_CharT, _Alloc>::_S_apply_to_pieces(
                                _Rope_char_consumer<_CharT>& __c,
                                const _RopeRep* __r,
                                size_t __begin, size_t __end)
{
    if (0 == __r) return true;
    switch(__r->_M_tag) {
        case _RopeRep::_S_concat:
            {
                _RopeConcatenation* __conc = (_RopeConcatenation*)__r;
                _RopeRep* __left = __conc->_M_left;
                size_t __left_len = __left->_M_size;
                if (__begin < __left_len) {
                    size_t __left_end = min(__left_len, __end);
                    if (!_S_apply_to_pieces(__c, __left, __begin, __left_end))
                        return false;
                }
                if (__end > __left_len) {
                    _RopeRep* __right = __conc->_M_right;
                    size_t __right_start = max(__left_len, __begin);
                    if (!_S_apply_to_pieces(__c, __right,
                                            __right_start - __left_len,
                                            __end - __left_len)) {
                        return false;
                    }
                }
            }
            return true;
        case _RopeRep::_S_leaf:
            {
                _RopeLeaf* __l = (_RopeLeaf*)__r;
                return __c(__l->_M_data + __begin, __end - __begin);
            }
        case _RopeRep::_S_function:
        case _RopeRep::_S_substringfn:
            {
                _RopeFunction* __f = (_RopeFunction*)__r;
                size_t __len = __end - __begin;
                bool __result;
                _CharT* __buffer =
                  (_CharT*)alloc::allocate(__len * sizeof(_CharT));
                __STL_TRY {
                    (*(__f->_M_fn))(__begin, __end, __buffer);
                    __result = __c(__buffer, __len);
                    alloc::deallocate(__buffer, __len * sizeof(_CharT));
                }
                __STL_UNWIND((alloc::deallocate(__buffer,
                                                __len * sizeof(_CharT))))
                return __result;
            }
        default:
            __stl_assert(false);
            /*NOTREACHED*/
            return false;
    }
}
inline void _Rope_fill(ostream& __o, size_t __n)
{
    char __f = __o.fill();
    size_t __i;

    for (__i = 0; __i < __n; __i++) __o.put(__f);
}
template <class _CharT> inline bool _Rope_is_simple(_CharT*) { return false; }
inline bool _Rope_is_simple(char*) { return true; }
inline bool _Rope_is_simple(wchar_t*) { return true; }
template<class _CharT, class _Alloc>
ostream& operator<< (ostream& __o, const rope<_CharT, _Alloc>& __r)
{
    size_t __w = __o.width();
    bool __left = bool(__o.flags() & ios::left);
    size_t __pad_len;
    size_t __rope_len = __r.size();
    _Rope_insert_char_consumer<_CharT> __c(__o);
    bool __is_simple = _Rope_is_simple((_CharT*)0);

    if (__rope_len < __w) {
        __pad_len = __w - __rope_len;
    } else {
        __pad_len = 0;
    }
    if (!__is_simple) __o.width(__w/__rope_len);
    __STL_TRY {
        if (__is_simple && !__left && __pad_len > 0) {
            _Rope_fill(__o, __pad_len);
        }
        __r.apply_to_pieces(0, __r.size(), __c);
        if (__is_simple && __left && __pad_len > 0) {
            _Rope_fill(__o, __pad_len);
        }
        if (!__is_simple) __o.width(__w);
    }
    __STL_UNWIND(if (!__is_simple) __o.width(__w))
    return __o;
}
template <class _CharT, class _Alloc>
_CharT*
rope<_CharT,_Alloc>::_S_flatten(_RopeRep* __r,
                                size_t __start, size_t __len,
                                _CharT* __buffer)
{
    _Rope_flatten_char_consumer<_CharT> __c(__buffer);
    _S_apply_to_pieces(__c, __r, __start, __start + __len);
    return(__buffer + __len);
}
template <class _CharT, class _Alloc>
rope<_CharT,_Alloc>::size_type
rope<_CharT,_Alloc>::find(_CharT __pattern, size_t __start) const
{
    _Rope_find_char_char_consumer<_CharT> __c(__pattern);
    _S_apply_to_pieces(__c, _M_tree_ptr, __start, size());
    size_type __result_pos = __start + __c._M_count;
#   ifndef __STL_OLD_ROPE_SEMANTICS
        if (__result_pos == size()) __result_pos = npos;
#   endif
    return __result_pos;
}
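
// Illustrative usage (added; hypothetical example, not part of the original
// sources).  With the usual crope typedef for rope<char>:
//
//     crope __r("hello world");
//     size_t __i = __r.find('w');   // 6
//     size_t __j = __r.find('z');   // npos (or size() with
//                                   // __STL_OLD_ROPE_SEMANTICS)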
template <class _CharT, class _Alloc>
_CharT*
rope<_CharT,_Alloc>::_S_flatten(_RopeRep* __r, _CharT* __buffer)
{
    if (0 == __r) return __buffer;
    switch(__r->_M_tag) {
        case _RopeRep::_S_concat:
            {
                _RopeConcatenation* __c = (_RopeConcatenation*)__r;
                _RopeRep* __left = __c->_M_left;
                _RopeRep* __right = __c->_M_right;
                _CharT* __rest = _S_flatten(__left, __buffer);
                return _S_flatten(__right, __rest);
            }
        case _RopeRep::_S_leaf:
            {
                _RopeLeaf* __l = (_RopeLeaf*)__r;
                return copy_n(__l->_M_data, __l->_M_size, __buffer).second;
            }
        case _RopeRep::_S_function:
        case _RopeRep::_S_substringfn:
            // We don't yet do anything with substring nodes.
            // This needs to be fixed before ropefiles will work well.
            {
                _RopeFunction* __f = (_RopeFunction*)__r;
                (*(__f->_M_fn))(0, __f->_M_size, __buffer);
                return __buffer + __f->_M_size;
            }
    }
    return __buffer;
}
// This needs work for _CharT != char
template <class _CharT, class _Alloc>
void
rope<_CharT,_Alloc>::_S_dump(_RopeRep* __r, int __indent)
{
    for (int __i = 0; __i < __indent; __i++) putchar(' ');
    if (0 == __r) {
        printf("NULL\n"); return;
    }
    if (_RopeRep::_S_concat == __r->_M_tag) {
        _RopeConcatenation* __c = (_RopeConcatenation*)__r;
        _RopeRep* __left = __c->_M_left;
        _RopeRep* __right = __c->_M_right;

#       ifdef __GC
          printf("Concatenation %p (depth = %d, len = %ld, %s balanced)\n",
                 __r, __r->_M_depth, __r->_M_size,
                 __r->_M_is_balanced? "" : "not");
#       else
          printf("Concatenation %p (rc = %ld, depth = %d, "
                 "len = %ld, %s balanced)\n",
                 __r, __r->_M_refcount, __r->_M_depth, __r->_M_size,
                 __r->_M_is_balanced? "" : "not");
#       endif
        _S_dump(__left, __indent + 2);
        _S_dump(__right, __indent + 2);
        return;
    } else {
        char* __kind;

        switch (__r->_M_tag) {
            case _RopeRep::_S_leaf:
                __kind = "Leaf";
                break;
            case _RopeRep::_S_function:
                __kind = "Function";
                break;
            case _RopeRep::_S_substringfn:
                __kind = "Function representing substring";
                break;
            default:
                __kind = "(corrupted kind field!)";
        }
#       ifdef __GC
          printf("%s %p (depth = %d, len = %ld) ",
                 __kind, __r, __r->_M_depth, __r->_M_size);
#       else
          printf("%s %p (rc = %ld, depth = %d, len = %ld) ",
                 __kind, __r, __r->_M_refcount, __r->_M_depth, __r->_M_size);
#       endif
        if (_S_is_one_byte_char_type((_CharT*)0)) {
            const int __max_len = 40;
            _Self_destruct_ptr __prefix(_S_substring(__r, 0, __max_len));
            _CharT __buffer[__max_len + 1];
            bool __too_big = __r->_M_size > __prefix->_M_size;

            _S_flatten(__prefix, __buffer);
            __buffer[__prefix->_M_size] = _S_eos((_CharT*)0);
            printf("%s%s", (char*)__buffer, __too_big? "...\n" : "\n");
        } else {
            printf("\n");
        }
    }
}
template <class _CharT, class _Alloc>
const unsigned long
rope<_CharT,_Alloc>::_S_min_len[
  _Rope_RopeRep<_CharT,_Alloc>::_S_max_rope_depth + 1] = {
/* 0 */1, /* 1 */2, /* 2 */3, /* 3 */5, /* 4 */8, /* 5 */13, /* 6 */21,
/* 7 */34, /* 8 */55, /* 9 */89, /* 10 */144, /* 11 */233, /* 12 */377,
/* 13 */610, /* 14 */987, /* 15 */1597, /* 16 */2584, /* 17 */4181,
/* 18 */6765, /* 19 */10946, /* 20 */17711, /* 21 */28657, /* 22 */46368,
/* 23 */75025, /* 24 */121393, /* 25 */196418, /* 26 */317811,
/* 27 */514229, /* 28 */832040, /* 29 */1346269, /* 30 */2178309,
/* 31 */3524578, /* 32 */5702887, /* 33 */9227465, /* 34 */14930352,
/* 35 */24157817, /* 36 */39088169, /* 37 */63245986, /* 38 */102334155,
/* 39 */165580141, /* 40 */267914296, /* 41 */433494437,
/* 42 */701408733, /* 43 */1134903170, /* 44 */1836311903,
/* 45 */2971215073u };
// These are Fibonacci numbers < 2**32.
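
// Note (added): these minimum lengths encode the balance criterion used by
// the rope implementation -- a rope of depth __i counts as balanced when its
// size is at least _S_min_len[__i] (see the invariant in _S_balance below).
// Because the table grows like the Fibonacci sequence, a balanced rope of n
// characters has depth O(log n).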
template <class _CharT, class _Alloc>
rope<_CharT,_Alloc>::_RopeRep*
rope<_CharT,_Alloc>::_S_balance(_RopeRep* __r)
{
    _RopeRep* __forest[_RopeRep::_S_max_rope_depth + 1];
    _RopeRep* __result = 0;
    int __i;
    // Invariant:
    // The concatenation of forest in descending order is equal to __r.
    // __forest[__i]._M_size >= _S_min_len[__i]
    // __forest[__i]._M_depth = __i
    // References from forest are included in refcount.

    for (__i = 0; __i <= _RopeRep::_S_max_rope_depth; ++__i)
      __forest[__i] = 0;
    __STL_TRY {
      _S_add_to_forest(__r, __forest);
      for (__i = 0; __i <= _RopeRep::_S_max_rope_depth; ++__i)
        if (0 != __forest[__i]) {
            _Self_destruct_ptr __old(__result);
            __result = _S_concat(__forest[__i], __result);
            __forest[__i]->_M_unref_nonnil();
#           if !defined(__GC) && defined(__STL_USE_EXCEPTIONS)
              __forest[__i] = 0;
#           endif
        }
    }
    __STL_UNWIND(for(__i = 0; __i <= _RopeRep::_S_max_rope_depth; __i++)
                 _S_unref(__forest[__i]))
    if (__result->_M_depth > _RopeRep::_S_max_rope_depth) abort();
    return(__result);
}
template <class _CharT, class _Alloc>
void
rope<_CharT,_Alloc>::_S_add_to_forest(_RopeRep* __r, _RopeRep** __forest)
{
    if (__r->_M_is_balanced) {
        _S_add_leaf_to_forest(__r, __forest);
        return;
    }
    __stl_assert(__r->_M_tag == _RopeRep::_S_concat);
    {
        _RopeConcatenation* __c = (_RopeConcatenation*)__r;

        _S_add_to_forest(__c->_M_left, __forest);
        _S_add_to_forest(__c->_M_right, __forest);
    }
}
template <class _CharT, class _Alloc>
void
rope<_CharT,_Alloc>::_S_add_leaf_to_forest(_RopeRep* __r, _RopeRep** __forest)
{
    _RopeRep* __insertee;               // included in refcount
    _RopeRep* __too_tiny = 0;           // included in refcount
    int __i;                            // forest[0..__i-1] is empty
    size_t __s = __r->_M_size;

    for (__i = 0; __s >= _S_min_len[__i+1]/* not this bucket */; ++__i) {
        if (0 != __forest[__i]) {
            _Self_destruct_ptr __old(__too_tiny);
            __too_tiny = _S_concat_and_set_balanced(__forest[__i], __too_tiny);
            __forest[__i]->_M_unref_nonnil();
            __forest[__i] = 0;
        }
    }
    {
        _Self_destruct_ptr __old(__too_tiny);
        __insertee = _S_concat_and_set_balanced(__too_tiny, __r);
    }
    // Too_tiny dead, and no longer included in refcount.
    // Insertee is live and included.
    __stl_assert(_S_is_almost_balanced(__insertee));
    __stl_assert(__insertee->_M_depth <= __r->_M_depth + 1);
    for (;; ++__i) {
        if (0 != __forest[__i]) {
            _Self_destruct_ptr __old(__insertee);
            __insertee = _S_concat_and_set_balanced(__forest[__i], __insertee);
            __forest[__i]->_M_unref_nonnil();
            __forest[__i] = 0;
            __stl_assert(_S_is_almost_balanced(__insertee));
        }
        __stl_assert(_S_min_len[__i] <= __insertee->_M_size);
        __stl_assert(__forest[__i] == 0);
        if (__i == _RopeRep::_S_max_rope_depth ||
              __insertee->_M_size < _S_min_len[__i+1]) {
            __forest[__i] = __insertee;
            // refcount is OK since __insertee is now dead.
            return;
        }
    }
}
template <class _CharT, class _Alloc>
_CharT
rope<_CharT,_Alloc>::_S_fetch(_RopeRep* __r, size_type __i)
{
    __GC_CONST _CharT* __cstr = __r->_M_c_string;

    __stl_assert(__i < __r->_M_size);
    if (0 != __cstr) return __cstr[__i];
    for(;;) {
      switch(__r->_M_tag) {
        case _RopeRep::_S_concat:
            {
                _RopeConcatenation* __c = (_RopeConcatenation*)__r;
                _RopeRep* __left = __c->_M_left;
                size_t __left_len = __left->_M_size;

                if (__i >= __left_len) {
                    __i -= __left_len;
                    __r = __c->_M_right;
                } else {
                    __r = __left;
                }
            }
            break;
        case _RopeRep::_S_leaf:
            {
                _RopeLeaf* __l = (_RopeLeaf*)__r;
                return __l->_M_data[__i];
            }
        case _RopeRep::_S_function:
        case _RopeRep::_S_substringfn:
            {
                _RopeFunction* __f = (_RopeFunction*)__r;
                _CharT __result;

                (*(__f->_M_fn))(__i, 1, &__result);
                return __result;
            }
      }
    }
}
// Return a uniquely referenced character slot for the given
// position, or 0 if that's not possible.
template <class _CharT, class _Alloc>
_CharT*
rope<_CharT,_Alloc>::_S_fetch_ptr(_RopeRep* __r, size_type __i)
{
    _RopeRep* __clrstack[_RopeRep::_S_max_rope_depth];
    size_t __csptr = 0;

    for(;;) {
      if (__r->_M_refcount > 1) return 0;
      switch(__r->_M_tag) {
        case _RopeRep::_S_concat:
            {
                _RopeConcatenation* __c = (_RopeConcatenation*)__r;
                _RopeRep* __left = __c->_M_left;
                size_t __left_len = __left->_M_size;

                if (__c->_M_c_string != 0) __clrstack[__csptr++] = __c;
                if (__i >= __left_len) {
                    __i -= __left_len;
                    __r = __c->_M_right;
                } else {
                    __r = __left;
                }
            }
            break;
        case _RopeRep::_S_leaf:
            {
                _RopeLeaf* __l = (_RopeLeaf*)__r;
                if (__l->_M_c_string != __l->_M_data && __l->_M_c_string != 0)
                    __clrstack[__csptr++] = __l;
                while (__csptr > 0) {
                    --__csptr;
                    _RopeRep* __d = __clrstack[__csptr];
                    __d->_M_free_c_string();
                    __d->_M_c_string = 0;
                }
                return __l->_M_data + __i;
            }
        case _RopeRep::_S_function:
        case _RopeRep::_S_substringfn:
            return 0;
      }
    }
}
// The following could be implemented trivially using
// lexicographical_compare_3way.
// We do a little more work to avoid dealing with rope iterators for
// flat strings.
template <class _CharT, class _Alloc>
int
rope<_CharT,_Alloc>::_S_compare (const _RopeRep* __left,
                                 const _RopeRep* __right)
{
    size_t __left_len;
    size_t __right_len;

    if (0 == __right) return 0 != __left;
    if (0 == __left) return -1;
    __left_len = __left->_M_size;
    __right_len = __right->_M_size;
    if (_RopeRep::_S_leaf == __left->_M_tag) {
        _RopeLeaf* __l = (_RopeLeaf*) __left;
        if (_RopeRep::_S_leaf == __right->_M_tag) {
            _RopeLeaf* __r = (_RopeLeaf*) __right;
            return lexicographical_compare_3way(
                        __l->_M_data, __l->_M_data + __left_len,
                        __r->_M_data, __r->_M_data + __right_len);
        } else {
            const_iterator __rstart(__right, 0);
            const_iterator __rend(__right, __right_len);
            return lexicographical_compare_3way(
                        __l->_M_data, __l->_M_data + __left_len,
                        __rstart, __rend);
        }
    } else {
        const_iterator __lstart(__left, 0);
        const_iterator __lend(__left, __left_len);
        if (_RopeRep::_S_leaf == __right->_M_tag) {
            _RopeLeaf* __r = (_RopeLeaf*) __right;
            return lexicographical_compare_3way(
                        __lstart, __lend,
                        __r->_M_data, __r->_M_data + __right_len);
        } else {
            const_iterator __rstart(__right, 0);
            const_iterator __rend(__right, __right_len);
            return lexicographical_compare_3way(
                        __lstart, __lend,
                        __rstart, __rend);
        }
    }
}
// Assignment to reference proxies.
template <class _CharT, class _Alloc>
_Rope_char_ref_proxy<_CharT, _Alloc>&
_Rope_char_ref_proxy<_CharT, _Alloc>::operator= (_CharT __c) {
    _RopeRep* __old = _M_root->_M_tree_ptr;
#   ifndef __GC
        // First check for the case in which everything is uniquely
        // referenced.  In that case we can do this destructively.
        _CharT* __ptr = _My_rope::_S_fetch_ptr(__old, _M_pos);
        if (0 != __ptr) {
            *__ptr = __c;
            return *this;
        }
#   endif
    _Self_destruct_ptr __left(
      _My_rope::_S_substring(__old, 0, _M_pos));
    _Self_destruct_ptr __right(
      _My_rope::_S_substring(__old, _M_pos+1, __old->_M_size));
    _Self_destruct_ptr __result_left(
      _My_rope::_S_destr_concat_char_iter(__left, &__c, 1));

    __stl_assert(__left == __result_left || 1 == __result_left->_M_refcount);
    _RopeRep* __result =
        _My_rope::_S_concat(__result_left, __right);
    __stl_assert(1 <= __result->_M_refcount);
    _RopeRep::_S_unref(__old);
    _M_root->_M_tree_ptr = __result;
    return *this;
}
template <class _CharT, class _Alloc>
inline _Rope_char_ref_proxy<_CharT, _Alloc>::operator _CharT () const
{
    if (_M_current_valid) {
        return _M_current;
    } else {
        return _My_rope::_S_fetch(_M_root->_M_tree_ptr, _M_pos);
    }
}
template <class _CharT, class _Alloc>
_Rope_char_ptr_proxy<_CharT, _Alloc>
_Rope_char_ref_proxy<_CharT, _Alloc>::operator& () const {
    return _Rope_char_ptr_proxy<_CharT, _Alloc>(*this);
}
template <class _CharT, class _Alloc>
rope<_CharT, _Alloc>::rope(size_t __n, _CharT __c,
                           const allocator_type& __a)
: _Base(__a)
{
    rope<_CharT,_Alloc> __result;
    const size_t __exponentiate_threshold = 32;
    size_t __exponent;
    size_t __rest;
    _CharT* __rest_buffer;
    _RopeRep* __remainder;
    rope<_CharT,_Alloc> __remainder_rope;

    if (0 == __n) return;

    __exponent = __n / __exponentiate_threshold;
    __rest = __n % __exponentiate_threshold;
    if (0 == __rest) {
        __remainder = 0;
    } else {
        __rest_buffer = _Data_allocate(_S_rounded_up_size(__rest));
        uninitialized_fill_n(__rest_buffer, __rest, __c);
        _S_cond_store_eos(__rest_buffer[__rest]);
        __STL_TRY {
            __remainder = _S_new_RopeLeaf(__rest_buffer, __rest, __a);
        }
        __STL_UNWIND(_RopeRep::__STL_FREE_STRING(__rest_buffer, __rest, __a))
    }
    __remainder_rope._M_tree_ptr = __remainder;
    if (__exponent != 0) {
        _CharT* __base_buffer =
          _Data_allocate(_S_rounded_up_size(__exponentiate_threshold));
        _RopeLeaf* __base_leaf;
        rope<_CharT,_Alloc> __base_rope;

        uninitialized_fill_n(__base_buffer, __exponentiate_threshold, __c);
        _S_cond_store_eos(__base_buffer[__exponentiate_threshold]);
        __STL_TRY {
            __base_leaf = _S_new_RopeLeaf(__base_buffer,
                                          __exponentiate_threshold, __a);
        }
        __STL_UNWIND(_RopeRep::__STL_FREE_STRING(__base_buffer,
                                                 __exponentiate_threshold, __a))
        __base_rope._M_tree_ptr = __base_leaf;
        if (1 == __exponent) {
            __result = __base_rope;
            __stl_assert(2 == __result._M_tree_ptr->_M_refcount);
                // One each for base_rope and __result
        } else {
            // XXX what is power()?
            __result = power(__base_rope, __exponent, _Concat_fn());
        }
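        // Note (added; not in the original sources): power() is the SGI STL
        // extension algorithm from <stl_numeric.h>.  It combines __base_rope
        // with itself __exponent times using the supplied monoid operation
        // (_Concat_fn, i.e. rope concatenation) by repeated squaring, so the
        // rope is assembled with only O(log __exponent) concatenations of the
        // 32-character base leaf.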
        if (0 != __remainder) {
            __result += __remainder_rope;
        }
    } else {
        __result = __remainder_rope;
    }
    _M_tree_ptr = __result._M_tree_ptr;
    _M_tree_ptr->_M_ref_nonnil();
}
template<class _CharT, class _Alloc>
_CharT rope<_CharT,_Alloc>::_S_empty_c_str[1];

# ifdef __STL_PTHREADS
    template<class _CharT, class _Alloc>
    pthread_mutex_t
    rope<_CharT,_Alloc>::_S_swap_lock = PTHREAD_MUTEX_INITIALIZER;
# endif
template<class _CharT, class _Alloc>
const _CharT* rope<_CharT,_Alloc>::c_str() const {
    if (0 == _M_tree_ptr) {
        _S_empty_c_str[0] = _S_eos((_CharT*)0);  // Possibly redundant,
                                                 // but probably fast.
        return _S_empty_c_str;
    }
    __GC_CONST _CharT* __old_c_string = _M_tree_ptr->_M_c_string;
    if (0 != __old_c_string) return(__old_c_string);
    size_t __s = size();
    _CharT* __result = _Data_allocate(__s + 1);
    _S_flatten(_M_tree_ptr, __result);
    __result[__s] = _S_eos((_CharT*)0);
#   ifdef __GC
        _M_tree_ptr->_M_c_string = __result;
#   else
      if ((__old_c_string =
             _S_atomic_swap(&(_M_tree_ptr->_M_c_string), __result)) != 0) {
        // It must have been added in the interim.  Hence it had to have been
        // separately allocated.  Deallocate the old copy, since we just
        // replaced it.
        destroy(__old_c_string, __old_c_string + __s + 1);
        _Data_deallocate(__old_c_string, __s + 1);
      }
#   endif
    return(__result);
}
template<class _CharT, class _Alloc>
const _CharT* rope<_CharT,_Alloc>::replace_with_c_str() {
    if (0 == _M_tree_ptr) {
        _S_empty_c_str[0] = _S_eos((_CharT*)0);
        return _S_empty_c_str;
    }
    __GC_CONST _CharT* __old_c_string = _M_tree_ptr->_M_c_string;
    if (_RopeRep::_S_leaf == _M_tree_ptr->_M_tag && 0 != __old_c_string) {
        return(__old_c_string);
    }
    size_t __s = size();
    _CharT* __result = _Data_allocate(_S_rounded_up_size(__s));
    _S_flatten(_M_tree_ptr, __result);
    __result[__s] = _S_eos((_CharT*)0);
    _M_tree_ptr->_M_unref_nonnil();
    _M_tree_ptr = _S_new_RopeLeaf(__result, __s, get_allocator());
    return(__result);
}
// Algorithm specializations.  More should be added.

#ifndef _MSC_VER
// I couldn't get this to work with VC++
template<class _CharT, class _Alloc>
void
_Rope_rotate(_Rope_iterator<_CharT,_Alloc> __first,
             _Rope_iterator<_CharT,_Alloc> __middle,
             _Rope_iterator<_CharT,_Alloc> __last)
{
    __stl_assert(__first.container() == __middle.container()
                 && __middle.container() == __last.container());
    rope<_CharT,_Alloc>& __r(__first.container());
    rope<_CharT,_Alloc> __prefix = __r.substr(0, __first.index());
    rope<_CharT,_Alloc> __suffix =
      __r.substr(__last.index(), __r.size() - __last.index());
    rope<_CharT,_Alloc> __part1 =
      __r.substr(__middle.index(), __last.index() - __middle.index());
    rope<_CharT,_Alloc> __part2 =
      __r.substr(__first.index(), __middle.index() - __first.index());
    __r = __prefix;
    __r += __part1;
    __r += __part2;
    __r += __suffix;
}
#if !defined(__GNUC__)
// Appears to confuse g++
inline void rotate(_Rope_iterator<char,__STL_DEFAULT_ALLOCATOR(char)> __first,
                   _Rope_iterator<char,__STL_DEFAULT_ALLOCATOR(char)> __middle,
                   _Rope_iterator<char,__STL_DEFAULT_ALLOCATOR(char)> __last) {
    _Rope_rotate(__first, __middle, __last);
}
#endif

// Probably not useful for several reasons:
// - for SGIs 7.1 compiler and probably some others,
//   this forces lots of rope<wchar_t, ...> instantiations, creating a
//   code bloat and compile time problem.  (Fixed in 7.2.)
// - wchar_t is 4 bytes wide on most UNIX platforms, making it unattractive
//   for unicode strings.  Unsigned short may be a better character
//   type.
inline void rotate(
    _Rope_iterator<wchar_t,__STL_DEFAULT_ALLOCATOR(char)> __first,
    _Rope_iterator<wchar_t,__STL_DEFAULT_ALLOCATOR(char)> __middle,
    _Rope_iterator<wchar_t,__STL_DEFAULT_ALLOCATOR(char)> __last) {
    _Rope_rotate(__first, __middle, __last);
}

#endif /* _MSC_VER */
#if defined(__sgi) && !defined(__GNUC__) && (_MIPS_SIM != _MIPS_SIM_ABI32)
#pragma reset woff 1174
#endif