PageRenderTime 72ms CodeModel.GetById 27ms RepoModel.GetById 0ms app.codeStats 1ms

/project/jni/stlport/stlport/stl/_rope.h

https://github.com/aichunyu/FFPlayer
C Header | 2374 lines | 1723 code | 304 blank | 347 comment | 133 complexity | 4dc8c97b4cf11125c61b0022351b463f MD5 | raw file
Possible License(s): LGPL-3.0, 0BSD, Apache-2.0, LGPL-2.1, GPL-2.0, CC-BY-SA-3.0, LGPL-2.0, BSD-3-Clause
  1. /*
  2. *
  3. * Copyright (c) 1996,1997
  4. * Silicon Graphics Computer Systems, Inc.
  5. *
  6. * Copyright (c) 1997
  7. * Moscow Center for SPARC Technology
  8. *
  9. * Copyright (c) 1999
  10. * Boris Fomitchev
  11. *
  12. * This material is provided "as is", with absolutely no warranty expressed
  13. * or implied. Any use is at your own risk.
  14. *
  15. * Permission to use or copy this software for any purpose is hereby granted
  16. * without fee, provided the above notices are retained on all copies.
  17. * Permission to modify the code and to distribute modified code is granted,
  18. * provided the above notices are retained, and a notice that the code was
  19. * modified is included with the above copyright notice.
  20. *
  21. */
  22. /* NOTE: This is an internal header file, included by other STL headers.
  23. * You should not attempt to use it directly.
  24. */
  25. // rope<_CharT,_Alloc> is a sequence of _CharT.
  26. // Ropes appear to be mutable, but update operations
  27. // really copy enough of the data structure to leave the original
  28. // valid. Thus ropes can be logically copied by just copying
  29. // a pointer value.
  30. #ifndef _STLP_INTERNAL_ROPE_H
  31. #define _STLP_INTERNAL_ROPE_H
  32. #ifndef _STLP_INTERNAL_ALGOBASE_H
  33. # include <stl/_algobase.h>
  34. #endif
  35. #ifndef _STLP_IOSFWD
  36. # include <iosfwd>
  37. #endif
  38. #ifndef _STLP_INTERNAL_ALLOC_H
  39. # include <stl/_alloc.h>
  40. #endif
  41. #ifndef _STLP_INTERNAL_ITERATOR_H
  42. # include <stl/_iterator.h>
  43. #endif
  44. #ifndef _STLP_INTERNAL_ALGO_H
  45. # include <stl/_algo.h>
  46. #endif
  47. #ifndef _STLP_INTERNAL_FUNCTION_BASE_H
  48. # include <stl/_function_base.h>
  49. #endif
  50. #ifndef _STLP_INTERNAL_NUMERIC_H
  51. # include <stl/_numeric.h>
  52. #endif
  53. #ifndef _STLP_INTERNAL_HASH_FUN_H
  54. # include <stl/_hash_fun.h>
  55. #endif
  56. #ifndef _STLP_CHAR_TRAITS_H
  57. # include <stl/char_traits.h>
  58. #endif
  59. #ifndef _STLP_INTERNAL_THREADS_H
  60. # include <stl/_threads.h>
  61. #endif
  62. #ifdef _STLP_SGI_THREADS
  63. # include <mutex.h>
  64. #endif
  65. #ifndef _STLP_DONT_SUPPORT_REBIND_MEMBER_TEMPLATE
  66. # define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) (_Alloc_traits<_Tp,__atype>::create_allocator(__a))
  67. #elif defined(__MRC__)||defined(__SC__)
  68. # define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create<_Tp,__atype>(__a,(_Tp*)0)
  69. #else
  70. # define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create(__a,(_Tp*)0)
  71. #endif
  72. _STLP_BEGIN_NAMESPACE
  73. // First a lot of forward declarations. The standard seems to require
  74. // much stricter "declaration before use" than many of the implementations
  75. // that preceded it.
  76. template<class _CharT, _STLP_DEFAULT_ALLOCATOR_SELECT(_CharT) > class rope;
  77. template<class _CharT, class _Alloc> struct _Rope_RopeConcatenation;
  78. template<class _CharT, class _Alloc> struct _Rope_RopeRep;
  79. template<class _CharT, class _Alloc> struct _Rope_RopeLeaf;
  80. template<class _CharT, class _Alloc> struct _Rope_RopeFunction;
  81. template<class _CharT, class _Alloc> struct _Rope_RopeSubstring;
  82. template<class _CharT, class _Alloc> class _Rope_iterator;
  83. template<class _CharT, class _Alloc> class _Rope_const_iterator;
  84. template<class _CharT, class _Alloc> class _Rope_char_ref_proxy;
  85. template<class _CharT, class _Alloc> class _Rope_char_ptr_proxy;
  86. _STLP_MOVE_TO_PRIV_NAMESPACE
  87. // Some helpers, so we can use the power algorithm on ropes.
  88. // See below for why this isn't local to the implementation.
  89. // This uses a nonstandard refcount convention.
  90. // The result has refcount 0.
  91. template<class _CharT, class _Alloc>
  92. struct _Rope_Concat_fn
  93. : public binary_function<rope<_CharT,_Alloc>, rope<_CharT,_Alloc>,
  94. rope<_CharT,_Alloc> > {
  95. rope<_CharT,_Alloc> operator() (const rope<_CharT,_Alloc>& __x,
  96. const rope<_CharT,_Alloc>& __y) {
  97. return __x + __y;
  98. }
  99. };
  100. template <class _CharT, class _Alloc>
  101. inline
  102. rope<_CharT,_Alloc>
  103. __identity_element(_Rope_Concat_fn<_CharT, _Alloc>)
  104. { return rope<_CharT,_Alloc>(); }
  105. _STLP_MOVE_TO_STD_NAMESPACE
  106. // Store an eos
  107. template <class _CharT>
  108. inline void _S_construct_null_aux(_CharT *__p, const __true_type&)
  109. { *__p = 0; }
  110. template <class _CharT>
  111. inline void _S_construct_null_aux(_CharT *__p, const __false_type&)
  112. { _STLP_STD::_Construct(__p); }
  113. template <class _CharT>
  114. inline void _S_construct_null(_CharT *__p) {
  115. typedef typename _IsIntegral<_CharT>::_Ret _Char_Is_Integral;
  116. _S_construct_null_aux(__p, _Char_Is_Integral());
  117. }
  118. // char_producers are logically functions that generate a section of
  119. // a string. These can be converted to ropes. The resulting rope
  120. // invokes the char_producer on demand. This allows, for example,
  121. // files to be viewed as ropes without reading the entire file.
// Abstract interface: a function object that can generate any section
// of a character sequence on demand (see the comment block above).
template <class _CharT>
class char_producer {
public:
  virtual ~char_producer() {}
  // Fill __buffer with __len characters of the produced sequence,
  // starting at logical position __start_pos.
  virtual void operator()(size_t __start_pos, size_t __len,
                          _CharT* __buffer) = 0;
  // Buffer should really be an arbitrary output iterator.
  // That way we could flatten directly into an ostream, etc.
  // This is thoroughly impossible, since iterator types don't
  // have runtime descriptions.
};
  133. // Sequence buffers:
  134. //
  135. // Sequence must provide an append operation that appends an
  136. // array to the sequence. Sequence buffers are useful only if
  137. // appending an entire array is cheaper than appending element by element.
  138. // This is true for many string representations.
  139. // This should perhaps inherit from ostream<sequence::value_type>
  140. // and be implemented correspondingly, so that they can be used
  141. // for formatted. For the sake of portability, we don't do this yet.
  142. //
  143. // For now, sequence buffers behave as output iterators. But they also
  144. // behave a little like basic_ostringstream<sequence::value_type> and a
  145. // little like containers.
  146. template<class _Sequence
  147. # if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
  148. defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
  149. , size_t _Buf_sz = 100
  150. # if defined(__sgi) && !defined(__GNUC__)
  151. # define __TYPEDEF_WORKAROUND
  152. ,class _V = typename _Sequence::value_type
  153. # endif /* __sgi */
  154. # endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
  155. >
  156. // The 3rd parameter works around a common compiler bug.
  157. class sequence_buffer : public iterator <output_iterator_tag, void, void, void, void> {
  158. public:
  159. # ifndef __TYPEDEF_WORKAROUND
  160. typedef typename _Sequence::value_type value_type;
  161. typedef sequence_buffer<_Sequence
  162. # if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
  163. defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
  164. , _Buf_sz
  165. > _Self;
  166. # else /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
  167. > _Self;
  168. enum { _Buf_sz = 100};
  169. # endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
  170. // # endif
  171. # else /* __TYPEDEF_WORKAROUND */
  172. typedef _V value_type;
  173. typedef sequence_buffer<_Sequence, _Buf_sz, _V> _Self;
  174. # endif /* __TYPEDEF_WORKAROUND */
  175. protected:
  176. _Sequence* _M_prefix;
  177. value_type _M_buffer[_Buf_sz];
  178. size_t _M_buf_count;
  179. public:
  180. void flush() {
  181. _M_prefix->append(_M_buffer, _M_buffer + _M_buf_count);
  182. _M_buf_count = 0;
  183. }
  184. ~sequence_buffer() { flush(); }
  185. sequence_buffer() : _M_prefix(0), _M_buf_count(0) {}
  186. sequence_buffer(const _Self& __x) {
  187. _M_prefix = __x._M_prefix;
  188. _M_buf_count = __x._M_buf_count;
  189. copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
  190. }
  191. sequence_buffer(_Self& __x) {
  192. __x.flush();
  193. _M_prefix = __x._M_prefix;
  194. _M_buf_count = 0;
  195. }
  196. sequence_buffer(_Sequence& __s) : _M_prefix(&__s), _M_buf_count(0) {}
  197. _Self& operator= (_Self& __x) {
  198. __x.flush();
  199. _M_prefix = __x._M_prefix;
  200. _M_buf_count = 0;
  201. return *this;
  202. }
  203. _Self& operator= (const _Self& __x) {
  204. _M_prefix = __x._M_prefix;
  205. _M_buf_count = __x._M_buf_count;
  206. copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
  207. return *this;
  208. }
  209. void push_back(value_type __x) {
  210. if (_M_buf_count < _Buf_sz) {
  211. _M_buffer[_M_buf_count] = __x;
  212. ++_M_buf_count;
  213. } else {
  214. flush();
  215. _M_buffer[0] = __x;
  216. _M_buf_count = 1;
  217. }
  218. }
  219. void append(const value_type *__s, size_t __len) {
  220. if (__len + _M_buf_count <= _Buf_sz) {
  221. size_t __i = _M_buf_count;
  222. size_t __j = 0;
  223. for (; __j < __len; __i++, __j++) {
  224. _M_buffer[__i] = __s[__j];
  225. }
  226. _M_buf_count += __len;
  227. } else if (0 == _M_buf_count) {
  228. _M_prefix->append(__s, __s + __len);
  229. } else {
  230. flush();
  231. append(__s, __len);
  232. }
  233. }
  234. _Self& write(const value_type *__s, size_t __len) {
  235. append(__s, __len);
  236. return *this;
  237. }
  238. _Self& put(value_type __x) {
  239. push_back(__x);
  240. return *this;
  241. }
  242. _Self& operator=(const value_type& __rhs) {
  243. push_back(__rhs);
  244. return *this;
  245. }
  246. _Self& operator*() { return *this; }
  247. _Self& operator++() { return *this; }
  248. _Self& operator++(int) { return *this; }
  249. };
  250. // The following should be treated as private, at least for now.
// Consumer of character chunks.  When member templates are available
// the consumer is passed as a template parameter and this class body
// is empty; otherwise it is a run-time-polymorphic interface.
template<class _CharT>
class _Rope_char_consumer {
#if !defined (_STLP_MEMBER_TEMPLATES)
public:
  //Without member templates we have to use run-time parameterization.
  // The symmetry with char_producer is accidental and temporary.
  virtual ~_Rope_char_consumer() {}
  // Process __len characters starting at __buffer.
  // NOTE(review): presumably returning false stops the traversal --
  // confirm against the rope members that take a consumer.
  virtual bool operator()(const _CharT* __buffer, size_t __len) = 0;
#endif
};
  261. //
  262. // What follows should really be local to rope. Unfortunately,
  263. // that doesn't work, since it makes it impossible to define generic
  264. // equality on rope iterators. According to the draft standard, the
  265. // template parameters for such an equality operator cannot be inferred
  266. // from the occurence of a member class as a parameter.
  267. // (SGI compilers in fact allow this, but the __result wouldn't be
  268. // portable.)
  269. // Similarly, some of the static member functions are member functions
  270. // only to avoid polluting the global namespace, and to circumvent
  271. // restrictions on type inference for template functions.
  272. //
  273. //
  274. // The internal data structure for representing a rope. This is
  275. // private to the implementation. A rope is really just a pointer
  276. // to one of these.
  277. //
  278. // A few basic functions for manipulating this data structure
  279. // are members of _RopeRep. Most of the more complex algorithms
  280. // are implemented as rope members.
  281. //
  282. // Some of the static member functions of _RopeRep have identically
  283. // named functions in rope that simply invoke the _RopeRep versions.
  284. //
// Base class of all rope tree nodes.  Carries the reference count
// (via _Refcount_Base), the node kind tag, depth, length, allocator
// and an optional flattened (contiguous) copy of the node's text.
template<class _CharT, class _Alloc>
struct _Rope_RopeRep
  : public _Refcount_Base
{
  typedef _Rope_RopeRep<_CharT, _Alloc> _Self;
public:
  //
  // GAB: 11/09/05
  //
  // "__ROPE_DEPTH_SIZE" is set to one more then the "__ROPE_MAX_DEPTH".
  // This was originally just an addition of "__ROPE_MAX_DEPTH + 1"
  // but this addition causes the sunpro compiler to complain about
  // multiple declarations during the initialization of "_S_min_len".
  // Changed to be a fixed value and the sunpro compiler appears to
  // be happy???
  //
# define __ROPE_MAX_DEPTH 45
# define __ROPE_DEPTH_SIZE 46 // __ROPE_MAX_DEPTH + 1
  enum { _S_max_rope_depth = __ROPE_MAX_DEPTH };
  // Discriminator for the four concrete node types derived from this struct.
  enum _Tag {_S_leaf, _S_concat, _S_substringfn, _S_function};
  // Apparently needed by VC++
  // The data fields of leaves are allocated with some
  // extra space, to accomodate future growth and for basic
  // character types, to hold a trailing eos character.
  enum { _S_alloc_granularity = 8 };
  // Node kind and balance flag, packed into byte-sized bitfields.
  _Tag _M_tag:8;
  bool _M_is_balanced:8;
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type allocator_type;
  // The allocator instance is recoverable from the _M_size proxy.
  allocator_type get_allocator() const { return allocator_type(_M_size); }
  unsigned char _M_depth;  // Tree depth; 0 for leaf/function nodes.
  /* Flattened version of string, if needed. */
  /* typically 0. */
  /* If it's not 0, then the memory is owned */
  /* by this node. */
  /* In the case of a leaf, this may point to */
  /* the same memory as the data field. */
  _CharT* _STLP_VOLATILE _M_c_string;
  // Length of this piece in characters (_M_size._M_data), bundled with
  // the allocator instance.
  _STLP_PRIV _STLP_alloc_proxy<size_t, _CharT, allocator_type> _M_size;
# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeRep() : _Refcount_Base(1), _M_size(allocator_type(), 0) {}
# endif
  // New nodes start with a reference count of 1 and no flattened copy.
  _Rope_RopeRep(_Tag __t, unsigned char __d, bool __b, size_t _p_size,
                allocator_type __a) :
    _Refcount_Base(1),
    _M_tag(__t), _M_is_balanced(__b), _M_depth(__d), _M_c_string(0), _M_size(__a, _p_size)
  { }
  typedef typename _AreSameUnCVTypes<_CharT, char>::_Ret _IsChar;
# ifdef _STLP_HAS_WCHAR_T
  typedef typename _AreSameUnCVTypes<_CharT, wchar_t>::_Ret _IsWCharT;
# else
  typedef __false_type _IsWCharT;
# endif
  // True when _CharT is char or wchar_t (a trailing eos is meaningful).
  typedef typename _Lor2<_IsChar, _IsWCharT>::_Ret _IsBasicCharType;
#if 0
  /* Please tell why this code is necessary if you uncomment it.
   * Problem with it is that rope implementation expect that _S_rounded_up_size(n)
   * returns a size > n in order to store the terminating null charater. When
   * instanciation type is not a char or wchar_t this is not guaranty resulting in
   * memory overrun.
   */
  static size_t _S_rounded_up_size_aux(size_t __n, __true_type const& /*_IsBasicCharType*/) {
    // Allow slop for in-place expansion.
    return (__n + _S_alloc_granularity) & ~(_S_alloc_granularity - 1);
  }
  static size_t _S_rounded_up_size_aux(size_t __n, __false_type const& /*_IsBasicCharType*/) {
    // Allow slop for in-place expansion.
    return (__n + _S_alloc_granularity - 1) & ~(_S_alloc_granularity - 1);
  }
#endif
  // fbp : moved from RopeLeaf
  // Round a character count up to the allocation granularity, always
  // leaving at least one extra slot (room for the eos and in-place growth).
  static size_t _S_rounded_up_size(size_t __n)
  //{ return _S_rounded_up_size_aux(__n, _IsBasicCharType()); }
  { return (__n + _S_alloc_granularity) & ~(_S_alloc_granularity - 1); }
  // Destroy and deallocate a character array of __len elements that was
  // allocated with the padded _S_rounded_up_size(__len) size.
  static void _S_free_string( _CharT* __s, size_t __len,
                              allocator_type __a) {
    _STLP_STD::_Destroy_Range(__s, __s + __len);
    // This has to be a static member, so this gets a bit messy
# ifndef _STLP_DONT_SUPPORT_REBIND_MEMBER_TEMPLATE
    __a.deallocate(__s, _S_rounded_up_size(__len)); //*ty 03/24/2001 - restored not to use __stl_alloc_rebind() since it is not defined under _STLP_MEMBER_TEMPLATE_CLASSES
# else
    __stl_alloc_rebind (__a, (_CharT*)0).deallocate(__s, _S_rounded_up_size(__len));
# endif
  }
  // Deallocate data section of a leaf.
  // This shouldn't be a member function.
  // But its hard to do anything else at the
  // moment, because it's templatized w.r.t.
  // an allocator.
  // Does nothing if __GC is defined.
  void _M_free_c_string();
  void _M_free_tree();
  // Deallocate t. Assumes t is not 0.
  void _M_unref_nonnil() {
    if (_M_decr() == 0) _M_free_tree();
  }
  void _M_ref_nonnil() {
    _M_incr();
  }
  // Null-tolerant reference-count helpers.
  static void _S_unref(_Self* __t) {
    if (0 != __t) {
      __t->_M_unref_nonnil();
    }
  }
  static void _S_ref(_Self* __t) {
    if (0 != __t) __t->_M_incr();
  }
  //static void _S_free_if_unref(_Self* __t) {
  //  if (0 != __t && 0 == __t->_M_ref_count) __t->_M_free_tree();
  //}
};
// Leaf node: owns a flat character array of _M_size._M_data characters.
template<class _CharT, class _Alloc>
struct _Rope_RopeLeaf : public _Rope_RopeRep<_CharT,_Alloc> {
public:
  _CharT* _M_data; /* Not necessarily 0 terminated. */
                   /* The allocated size is */
                   /* _S_rounded_up_size(size), except */
                   /* in the GC case, in which it */
                   /* doesn't matter. */
private:
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  typedef typename _RopeRep::_IsBasicCharType _IsBasicCharType;
  // For basic character types the padded data array can double as the
  // flattened c-string representation, so point _M_c_string at it.
  void _M_init(__true_type const& /*_IsBasicCharType*/) {
    this->_M_c_string = _M_data;
  }
  void _M_init(__false_type const& /*_IsBasicCharType*/) {}
public:
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _RopeRep::allocator_type allocator_type;
  // Leaves are created balanced, with depth 0.
  _Rope_RopeLeaf( _CharT* __d, size_t _p_size, allocator_type __a)
    : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_leaf, 0, true, _p_size, __a),
      _M_data(__d) {
    _STLP_ASSERT(_p_size > 0)
    _M_init(_IsBasicCharType());
  }
# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeLeaf() {}
  _Rope_RopeLeaf(const _Rope_RopeLeaf<_CharT, _Alloc>& ) {}
# endif
  // The constructor assumes that d has been allocated with
  // the proper allocator and the properly padded size.
  // In contrast, the destructor deallocates the data:
  ~_Rope_RopeLeaf() {
    // Release a separately-allocated flattened copy first (when
    // _M_c_string aliases _M_data there is nothing extra to free).
    if (_M_data != this->_M_c_string) {
      this->_M_free_c_string();
    }
    _RopeRep::_S_free_string(_M_data, this->_M_size._M_data, this->get_allocator());
  }
};
// Interior node: the represented sequence is *_M_left followed by
// *_M_right.  Size is the sum of the children's sizes; depth is one
// more than the deeper child.
template<class _CharT, class _Alloc>
struct _Rope_RopeConcatenation : public _Rope_RopeRep<_CharT, _Alloc> {
private:
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
public:
  _RopeRep* _M_left;
  _RopeRep* _M_right;
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _RopeRep::allocator_type allocator_type;
  // Takes over one reference to each child: the counts are not
  // incremented here, but the destructor decrements both.
  _Rope_RopeConcatenation(_RopeRep* __l, _RopeRep* __r, allocator_type __a)
    : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_concat,
                                   (max)(__l->_M_depth, __r->_M_depth) + 1, false,
                                   __l->_M_size._M_data + __r->_M_size._M_data, __a),
      _M_left(__l), _M_right(__r)
  {}
# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeConcatenation() {}
  _Rope_RopeConcatenation(const _Rope_RopeConcatenation<_CharT, _Alloc>&) {}
# endif
  ~_Rope_RopeConcatenation() {
    // Drop any flattened representation, then release both children.
    this->_M_free_c_string();
    _M_left->_M_unref_nonnil();
    _M_right->_M_unref_nonnil();
  }
};
// Function node: characters are produced on demand by a user-supplied
// char_producer instead of being stored in memory.
template <class _CharT, class _Alloc>
struct _Rope_RopeFunction : public _Rope_RopeRep<_CharT, _Alloc> {
private:
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
public:
  char_producer<_CharT>* _M_fn;
  /*
   * Char_producer is owned by the
   * rope and should be explicitly
   * deleted when the rope becomes
   * inaccessible.
   */
  bool _M_delete_when_done;
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeFunction() {}
  _Rope_RopeFunction(const _Rope_RopeFunction<_CharT, _Alloc>& ) {}
# endif
  // __d == true transfers ownership of __f: it is deleted with this node.
  _Rope_RopeFunction(char_producer<_CharT>* __f, size_t _p_size,
                     bool __d, allocator_type __a)
    : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_function, 0, true, _p_size, __a), _M_fn(__f)
    , _M_delete_when_done(__d)
  { _STLP_ASSERT(_p_size > 0) }
  ~_Rope_RopeFunction() {
    this->_M_free_c_string();
    if (_M_delete_when_done) {
      delete _M_fn;
    }
  }
};
/*
 * Substring results are usually represented using just
 * concatenation nodes. But in the case of very long flat ropes
 * or ropes with a functional representation that isn't practical.
 * In that case, we represent the __result as a special case of
 * RopeFunction, whose char_producer points back to the rope itself.
 * In all cases except repeated substring operations and
 * deallocation, we treat the __result as a RopeFunction.
 */
template<class _CharT, class _Alloc>
struct _Rope_RopeSubstring : public char_producer<_CharT>, public _Rope_RopeFunction<_CharT,_Alloc> {
public:
  // XXX this whole class should be rewritten.
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  _RopeRep *_M_base; // not 0
  size_t _M_start;   // Offset of this substring within *_M_base.
  // char_producer interface: copy __req_len characters, starting at
  // __start_pos relative to the substring, into __buffer.
  /* virtual */ void operator()(size_t __start_pos, size_t __req_len,
                                _CharT* __buffer) {
    typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
    typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
    switch (_M_base->_M_tag) {
      case _RopeRep::_S_function:
      case _RopeRep::_S_substringfn:
        {
          // Delegate to the base rope's producer, shifted by _M_start.
          char_producer<_CharT>* __fn =
            __STATIC_CAST(_RopeFunction*, _M_base)->_M_fn;
          _STLP_ASSERT(__start_pos + __req_len <= this->_M_size._M_data)
          _STLP_ASSERT(_M_start + this->_M_size._M_data <= _M_base->_M_size._M_data)
          (*__fn)(__start_pos + _M_start, __req_len, __buffer);
        }
        break;
      case _RopeRep::_S_leaf:
        {
          // Flat base: copy straight out of the leaf's array.
          _CharT* __s =
            __STATIC_CAST(_RopeLeaf*, _M_base)->_M_data;
          _STLP_PRIV __ucopy_n(__s + __start_pos + _M_start, __req_len, __buffer);
        }
        break;
      default:
        // Other node kinds are never used as a substring base here.
        _STLP_ASSERT(false)
        ;
    }
  }
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _RopeRep::allocator_type allocator_type;
  // Substring of *__b, __l characters starting at offset __s.  Passes
  // itself as the char_producer, adds a reference to the base, then
  // retags the node from _S_function to _S_substringfn.
  _Rope_RopeSubstring(_RopeRep* __b, size_t __s, size_t __l, allocator_type __a)
    : _Rope_RopeFunction<_CharT,_Alloc>(this, __l, false, __a),
      _M_base(__b), _M_start(__s) {
    _STLP_ASSERT(__l > 0)
    _STLP_ASSERT(__s + __l <= __b->_M_size._M_data)
    _M_base->_M_ref_nonnil();
    this->_M_tag = _RopeRep::_S_substringfn;
  }
  virtual ~_Rope_RopeSubstring()
  { _M_base->_M_unref_nonnil(); }
};
  545. /*
  546. * Self-destructing pointers to Rope_rep.
  547. * These are not conventional smart pointers. Their
  548. * only purpose in life is to ensure that unref is called
  549. * on the pointer either at normal exit or if an exception
  550. * is raised. It is the caller's responsibility to
  551. * adjust reference counts when these pointers are initialized
  552. * or assigned to. (This convention significantly reduces
  553. * the number of potentially expensive reference count
  554. * updates.)
  555. */
// See the comment block above: a scope guard, not a smart pointer.
// It unrefs on destruction only; assignment does not touch counts.
template<class _CharT, class _Alloc>
struct _Rope_self_destruct_ptr {
  _Rope_RopeRep<_CharT,_Alloc>* _M_ptr;
  // Unref (and possibly free) the tree on scope exit; null-safe.
  ~_Rope_self_destruct_ptr()
  { _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_ptr); }
# ifdef _STLP_USE_EXCEPTIONS
  _Rope_self_destruct_ptr() : _M_ptr(0) {}
# else
  // NOTE(review): _M_ptr is deliberately left uninitialized here --
  // presumably safe because without exceptions no early exit can run
  // the destructor before assignment; confirm before changing.
  _Rope_self_destruct_ptr() {}
# endif
  _Rope_self_destruct_ptr(_Rope_RopeRep<_CharT,_Alloc>* __p) : _M_ptr(__p) {}
  _Rope_RopeRep<_CharT,_Alloc>& operator*() { return *_M_ptr; }
  _Rope_RopeRep<_CharT,_Alloc>* operator->() { return _M_ptr; }
  operator _Rope_RopeRep<_CharT,_Alloc>*() { return _M_ptr; }
  // Plain pointer assignment: reference counts are NOT adjusted.
  _Rope_self_destruct_ptr<_CharT, _Alloc>&
  operator= (_Rope_RopeRep<_CharT,_Alloc>* __x)
  { _M_ptr = __x; return *this; }
};
  574. /*
  575. * Dereferencing a nonconst iterator has to return something
  576. * that behaves almost like a reference. It's not possible to
  577. * return an actual reference since assignment requires extra
  578. * work. And we would get into the same problems as with the
  579. * CD2 version of basic_string.
  580. */
// Proxy returned when dereferencing a nonconst rope iterator (see the
// comment block above): identifies a character by rope + position.
template<class _CharT, class _Alloc>
class _Rope_char_ref_proxy {
  typedef _Rope_char_ref_proxy<_CharT, _Alloc> _Self;
  friend class rope<_CharT,_Alloc>;
  friend class _Rope_iterator<_CharT,_Alloc>;
  friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
  typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  typedef rope<_CharT,_Alloc> _My_rope;
  size_t _M_pos;          // Character position within *_M_root.
  _CharT _M_current;      // Cached character value.
  bool _M_current_valid;  // True only when _M_current holds the character at _M_pos.
  _My_rope* _M_root; // The whole rope.
public:
  _Rope_char_ref_proxy(_My_rope* __r, size_t __p) :
    _M_pos(__p), _M_current_valid(false), _M_root(__r) {}
  // Copying deliberately drops the cache (see comment below).
  _Rope_char_ref_proxy(const _Self& __x) :
    _M_pos(__x._M_pos), _M_current_valid(false), _M_root(__x._M_root) {}
  // Don't preserve cache if the reference can outlive the
  // expression. We claim that's not possible without calling
  // a copy constructor or generating reference to a proxy
  // reference. We declare the latter to have undefined semantics.
  _Rope_char_ref_proxy(_My_rope* __r, size_t __p, _CharT __c)
    : _M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) {}
  // Read access (defined elsewhere in this header).
  inline operator _CharT () const;
  // Write access (defined elsewhere in this header).
  _Self& operator= (_CharT __c);
  _Rope_char_ptr_proxy<_CharT, _Alloc> operator& () const;
  // Proxy-to-proxy assignment copies the character value, not the proxy.
  _Self& operator= (const _Self& __c) {
    return operator=((_CharT)__c);
  }
};
#ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
// swap for proxy references: goes through the proxies' conversion to
// _CharT and their assignment operators, so the referenced characters
// in the ropes are exchanged rather than the proxy objects themselves.
template<class _CharT, class __Alloc>
inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
                 _Rope_char_ref_proxy <_CharT, __Alloc > __b) {
  _CharT __tmp = __a;
  __a = __b;
  __b = __tmp;
}
#else
// There is no really acceptable way to handle this. The default
// definition of swap doesn't work for proxy references.
// It can't really be made to work, even with ugly hacks, since
// the only unusual operation it uses is the copy constructor, which
// is needed for other purposes. We provide a macro for
// full specializations, and instantiate the most common case.
# define _ROPE_SWAP_SPECIALIZATION(_CharT, __Alloc) \
  inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a, \
                   _Rope_char_ref_proxy <_CharT, __Alloc > __b) { \
    _CharT __tmp = __a; \
    __a = __b; \
    __b = __tmp; \
  }
_ROPE_SWAP_SPECIALIZATION(char,_STLP_DEFAULT_ALLOCATOR(char) )
# ifndef _STLP_NO_WCHAR_T
_ROPE_SWAP_SPECIALIZATION(wchar_t,_STLP_DEFAULT_ALLOCATOR(wchar_t) )
# endif
#endif /* !_STLP_FUNCTION_TMPL_PARTIAL_ORDER */
  639. template<class _CharT, class _Alloc>
  640. class _Rope_char_ptr_proxy {
  641. // XXX this class should be rewritten.
  642. public:
  643. typedef _Rope_char_ptr_proxy<_CharT, _Alloc> _Self;
  644. friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
  645. size_t _M_pos;
  646. rope<_CharT,_Alloc>* _M_root; // The whole rope.
  647. _Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x)
  648. : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
  649. _Rope_char_ptr_proxy(const _Self& __x)
  650. : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
  651. _Rope_char_ptr_proxy() {}
  652. _Rope_char_ptr_proxy(_CharT* __x) : _M_pos(0), _M_root(0) {
  653. _STLP_ASSERT(0 == __x)
  654. }
  655. _Self& operator= (const _Self& __x) {
  656. _M_pos = __x._M_pos;
  657. _M_root = __x._M_root;
  658. return *this;
  659. }
  660. _Rope_char_ref_proxy<_CharT,_Alloc> operator*() const {
  661. return _Rope_char_ref_proxy<_CharT,_Alloc>(_M_root, _M_pos);
  662. }
  663. };
  664. /*
  665. * Rope iterators:
  666. * Unlike in the C version, we cache only part of the stack
  667. * for rope iterators, since they must be efficiently copyable.
  668. * When we run out of cache, we have to reconstruct the iterator
  669. * value.
  670. * Pointers from iterators are not included in reference counts.
  671. * Iterators are assumed to be thread private. Ropes can
  672. * be shared.
  673. */
  674. template<class _CharT, class _Alloc>
  675. class _Rope_iterator_base
  676. /* : public random_access_iterator<_CharT, ptrdiff_t> */
  677. {
  678. friend class rope<_CharT,_Alloc>;
  679. typedef _Rope_iterator_base<_CharT, _Alloc> _Self;
  680. typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcat;
  681. public:
  682. typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  683. enum { _S_path_cache_len = 4 }; // Must be <= 9 because of _M_path_direction.
  684. enum { _S_iterator_buf_len = 15 };
  685. size_t _M_current_pos;
  686. // The whole rope.
  687. _RopeRep* _M_root;
  688. // Starting position for current leaf
  689. size_t _M_leaf_pos;
  690. // Buffer possibly containing current char.
  691. _CharT* _M_buf_start;
  692. // Pointer to current char in buffer, != 0 ==> buffer valid.
  693. _CharT* _M_buf_ptr;
  694. // One past __last valid char in buffer.
  695. _CharT* _M_buf_end;
  696. // What follows is the path cache. We go out of our
  697. // way to make this compact.
  698. // Path_end contains the bottom section of the path from
  699. // the root to the current leaf.
  700. struct {
  701. # if defined (__BORLANDC__) && (__BORLANDC__ < 0x560)
  702. _RopeRep const*_M_data[4];
  703. # else
  704. _RopeRep const*_M_data[_S_path_cache_len];
  705. # endif
  706. } _M_path_end;
  707. // Last valid __pos in path_end;
  708. // _M_path_end[0] ... _M_path_end[_M_leaf_index-1]
  709. // point to concatenation nodes.
  710. int _M_leaf_index;
  711. // (_M_path_directions >> __i) & 1 is 1
  712. // if we got from _M_path_end[leaf_index - __i - 1]
  713. // to _M_path_end[leaf_index - __i] by going to the
  714. // __right. Assumes path_cache_len <= 9.
  715. unsigned char _M_path_directions;
  716. // Short buffer for surrounding chars.
  717. // This is useful primarily for
  718. // RopeFunctions. We put the buffer
  719. // here to avoid locking in the
  720. // multithreaded case.
  721. // The cached path is generally assumed to be valid
  722. // only if the buffer is valid.
  723. struct {
  724. # if defined (__BORLANDC__) && (__BORLANDC__ < 0x560)
  725. _CharT _M_data[15];
  726. # else
  727. _CharT _M_data[_S_iterator_buf_len];
  728. # endif
  729. } _M_tmp_buf;
  730. // Set buffer contents given path cache.
  731. static void _S_setbuf(_Rope_iterator_base<_CharT, _Alloc>& __x);
  732. // Set buffer contents and path cache.
  733. static void _S_setcache(_Rope_iterator_base<_CharT, _Alloc>& __x);
  734. // As above, but assumes path cache is valid for previous posn.
  735. static void _S_setcache_for_incr(_Rope_iterator_base<_CharT, _Alloc>& __x);
  736. _Rope_iterator_base() {}
  737. _Rope_iterator_base(_RopeRep* __root, size_t __pos)
  738. : _M_current_pos(__pos),_M_root(__root), _M_buf_ptr(0) {}
  739. void _M_incr(size_t __n);
  740. void _M_decr(size_t __n);
  741. public:
  742. size_t index() const { return _M_current_pos; }
  743. private:
  // Copy the cached character buffer from __x.
  // If __x buffers characters in its own small _M_tmp_buf (the
  // buffer pointers point into that member), the start/end/current
  // pointers must be re-based onto *this* object's _M_tmp_buf;
  // otherwise the buffer points into rope-owned storage and the
  // pointers remain valid as-is.
  // NOTE(review): the caller (copy ctor / operator=) is expected to
  // have copied _M_buf_start and _M_buf_ptr already; only _M_buf_end
  // is taken from __x in the shared-storage branch.
  void _M_copy_buf(const _Self& __x) {
    _M_tmp_buf = __x._M_tmp_buf;
    if (__x._M_buf_start == __x._M_tmp_buf._M_data) {
      // __x used its private buffer: re-point at our own copy.
      _M_buf_start = _M_tmp_buf._M_data;
      _M_buf_end = _M_buf_start + (__x._M_buf_end - __x._M_buf_start);
      _M_buf_ptr = _M_buf_start + (__x._M_buf_ptr - __x._M_buf_start);
    } else {
      // Buffer points into shared rope data; pointers stay valid.
      _M_buf_end = __x._M_buf_end;
    }
  }
  754. public:
  // Copy constructor: replicate position, root pointer and the path
  // cache.  _M_buf_end is deliberately absent from the initializer
  // list: it is meaningful only while the buffer is valid
  // (_M_buf_ptr != 0), and _M_copy_buf() sets it up in that case.
  _Rope_iterator_base(const _Self& __x) :
      _M_current_pos(__x._M_current_pos),
      _M_root(__x._M_root),
      _M_leaf_pos( __x._M_leaf_pos ),
      _M_buf_start(__x._M_buf_start),
      _M_buf_ptr(__x._M_buf_ptr),
      _M_path_end(__x._M_path_end),
      _M_leaf_index(__x._M_leaf_index),
      _M_path_directions(__x._M_path_directions)
  {
    if (0 != __x._M_buf_ptr) {
      // Source has a valid buffer cache: fix up buffer pointers.
      _M_copy_buf(__x);
    }
  }
  // Assignment: field-by-field copy, then the same buffer fix-up as
  // the copy constructor.  (Reference counting, if any, is handled
  // by derived classes; this base does not touch refcounts.)
  _Self& operator = (const _Self& __x)
  {
    _M_current_pos = __x._M_current_pos;
    _M_root = __x._M_root;
    _M_buf_start = __x._M_buf_start;
    _M_buf_ptr = __x._M_buf_ptr;
    _M_path_end = __x._M_path_end;
    _M_leaf_index = __x._M_leaf_index;
    _M_path_directions = __x._M_path_directions;
    _M_leaf_pos = __x._M_leaf_pos;
    if (0 != __x._M_buf_ptr) {
      _M_copy_buf(__x);
    }
    return *this;
  }
  784. };
  785. template<class _CharT, class _Alloc> class _Rope_iterator;
// Read-only random-access iterator over a rope.  Const iterators do
// not participate in root reference counting, so copying them is
// cheap; they carry a cached buffer of nearby characters (in the
// base class) to amortize tree traversal cost.
template<class _CharT, class _Alloc>
class _Rope_const_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
  friend class rope<_CharT,_Alloc>;
  typedef _Rope_const_iterator<_CharT, _Alloc> _Self;
  typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
  // protected:
public:
# ifndef _STLP_HAS_NO_NAMESPACES
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  // The one from the base class may not be directly visible.
# endif
  // Construct from a (possibly const) tree root and an index.  The
  // const_cast is safe because const iterators never mutate the tree.
  _Rope_const_iterator(const _RopeRep* __root, size_t __pos):
    _Rope_iterator_base<_CharT,_Alloc>(__CONST_CAST(_RopeRep*,__root), __pos)
    // Only nonconst iterators modify root ref count
  {}
public:
  typedef _CharT reference;   // Really a value.  Returning a reference
                              // would be a mess, since it would have
                              // to be included in refcount.
  typedef const _CharT* pointer;
  typedef _CharT value_type;
  typedef ptrdiff_t difference_type;
  typedef random_access_iterator_tag iterator_category;
public:
  _Rope_const_iterator() {}
  _Rope_const_iterator(const _Self& __x) :
    _Rope_iterator_base<_CharT,_Alloc>(__x) { }
  // A mutable iterator converts implicitly to a const one.
  _Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x):
    _Rope_iterator_base<_CharT,_Alloc>(__x) {}
  _Rope_const_iterator(const rope<_CharT,_Alloc>& __r, size_t __pos) :
    _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr._M_data, __pos) {}
  _Self& operator= (const _Self& __x) {
    _Base::operator=(__x);
    return *this;
  }
  // Dereference: fill the character-buffer cache on demand, then
  // read from it.  Returns by value (see `reference` typedef).
  reference operator*() {
    if (0 == this->_M_buf_ptr)
#if !defined (__DMC__)
      _S_setcache(*this);
#else
      // Digital Mars workaround: call through an explicit base lvalue.
      { _Rope_iterator_base<_CharT, _Alloc>* __x = this; _S_setcache(*__x); }
#endif
    return *(this->_M_buf_ptr);
  }
  // Fast path: step within the cached buffer when possible,
  // otherwise fall back to the full _M_incr traversal.
  _Self& operator++() {
    _CharT* __next;
    if (0 != this->_M_buf_ptr && (__next = this->_M_buf_ptr + 1) < this->_M_buf_end) {
      this->_M_buf_ptr = __next;
      ++this->_M_current_pos;
    } else {
      this->_M_incr(1);
    }
    return *this;
  }
  _Self& operator+=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_incr(__n);
    } else {
      this->_M_decr(-__n);
    }
    return *this;
  }
  _Self& operator--() {
    this->_M_decr(1);
    return *this;
  }
  _Self& operator-=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_decr(__n);
    } else {
      this->_M_incr(-__n);
    }
    return *this;
  }
  _Self operator++(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_incr(1);
    return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
    // This makes a subsequent dereference expensive.
    // Perhaps we should instead copy the iterator
    // if it has a valid cache?
  }
  _Self operator--(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_decr(1);
    return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
  }
  // Random access; defined out of line.
  inline reference operator[](size_t __n);
};
// Mutable random-access iterator over a rope.  Unlike the const
// iterator, it holds a counted reference to the root so that changes
// to the underlying rope can be detected (_M_check), and dereference
// yields a char-reference proxy rather than a plain value.
template<class _CharT, class _Alloc>
class _Rope_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
  friend class rope<_CharT,_Alloc>;
  typedef _Rope_iterator<_CharT, _Alloc> _Self;
  typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
public:
  rope<_CharT,_Alloc>* _M_root_rope;
  // root is treated as a cached version of this,
  // and is used to detect changes to the underlying
  // rope.
  // Root is included in the reference count.
  // This is necessary so that we can detect changes reliably.
  // Unfortunately, it requires careful bookkeeping for the
  // nonGC case.
  _Rope_iterator(rope<_CharT,_Alloc>* __r, size_t __pos);
  // Re-validate the cached root against *_M_root_rope (out of line).
  void _M_check();
public:
  typedef _Rope_char_ref_proxy<_CharT,_Alloc>  reference;
  typedef _Rope_char_ref_proxy<_CharT,_Alloc>* pointer;
  typedef _CharT value_type;
  typedef ptrdiff_t difference_type;
  typedef random_access_iterator_tag iterator_category;
public:
  ~_Rope_iterator() {  //*TY 5/6/00 - added dtor to balance reference count
    _RopeRep::_S_unref(this->_M_root);
  }
  rope<_CharT,_Alloc>& container() { return *_M_root_rope; }
  _Rope_iterator() {
    this->_M_root = 0;  // Needed for reference counting.
  }
  // Copy constructor takes an extra reference on the shared root.
  _Rope_iterator(const _Self& __x) :
    _Rope_iterator_base<_CharT,_Alloc>(__x) {
    _M_root_rope = __x._M_root_rope;
    _RopeRep::_S_ref(this->_M_root);
  }
  _Rope_iterator(rope<_CharT,_Alloc>& __r, size_t __pos);
  // Assignment: ref the new root *before* unref'ing the old one so
  // that self-assignment cannot drop the last reference.
  _Self& operator= (const _Self& __x) {
    _RopeRep* __old = this->_M_root;
    _RopeRep::_S_ref(__x._M_root);
    _Base::operator=(__x);
    _M_root_rope = __x._M_root_rope;
    _RopeRep::_S_unref(__old);
    return *this;
  }
  // Dereference returns a proxy; when the buffer cache is valid the
  // proxy is seeded with the current character value.
  reference operator*() {
    _M_check();
    if (0 == this->_M_buf_ptr) {
      return reference(_M_root_rope, this->_M_current_pos);
    } else {
      return reference(_M_root_rope, this->_M_current_pos, *(this->_M_buf_ptr));
    }
  }
  _Self& operator++() {
    this->_M_incr(1);
    return *this;
  }
  _Self& operator+=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_incr(__n);
    } else {
      this->_M_decr(-__n);
    }
    return *this;
  }
  _Self& operator--() {
    this->_M_decr(1);
    return *this;
  }
  _Self& operator-=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_decr(__n);
    } else {
      this->_M_incr(-__n);
    }
    return *this;
  }
  _Self operator++(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_incr(1);
    return _Self(_M_root_rope, __old_pos);
  }
  _Self operator--(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_decr(1);
    return _Self(_M_root_rope, __old_pos);
  }
  reference operator[](ptrdiff_t __n) {
    return reference(_M_root_rope, this->_M_current_pos + __n);
  }
};
  966. # ifdef _STLP_USE_OLD_HP_ITERATOR_QUERIES
// Obsolete HP-style iterator queries: report the iterator category,
// value type and distance type of rope iterators for compilers that
// predate iterator_traits (value/distance are conveyed via pointer
// return types; the null return value itself is never used).
template <class _CharT, class _Alloc>
inline random_access_iterator_tag
iterator_category(const _Rope_iterator<_CharT,_Alloc>&) { return random_access_iterator_tag();}
template <class _CharT, class _Alloc>
inline _CharT* value_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
template <class _CharT, class _Alloc>
inline ptrdiff_t* distance_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
template <class _CharT, class _Alloc>
inline random_access_iterator_tag
iterator_category(const _Rope_const_iterator<_CharT,_Alloc>&) { return random_access_iterator_tag(); }
template <class _CharT, class _Alloc>
inline _CharT* value_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
template <class _CharT, class _Alloc>
inline ptrdiff_t* distance_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
  981. #endif /* _STLP_USE_OLD_HP_ITERATOR_QUERIES */
  982. template <class _CharT, class _Alloc, class _CharConsumer>
  983. bool _S_apply_to_pieces(_CharConsumer& __c,
  984. _Rope_RopeRep<_CharT, _Alloc> *__r,
  985. size_t __begin, size_t __end);
  986. // begin and end are assumed to be in range.
  987. template <class _CharT, class _Alloc>
  988. class rope
  989. #if defined (_STLP_USE_PARTIAL_SPEC_WORKAROUND)
  990. : public __stlport_class<rope<_CharT, _Alloc> >
  991. #endif
  992. {
  993. typedef rope<_CharT,_Alloc> _Self;
  994. public:
  995. typedef _CharT value_type;
  996. typedef ptrdiff_t difference_type;
  997. typedef size_t size_type;
  998. typedef _CharT const_reference;
  999. typedef const _CharT* const_pointer;
  1000. typedef _Rope_iterator<_CharT,_Alloc> iterator;
  1001. typedef _Rope_const_iterator<_CharT,_Alloc> const_iterator;
  1002. typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
  1003. typedef _Rope_char_ptr_proxy<_CharT,_Alloc> pointer;
  1004. friend class _Rope_iterator<_CharT,_Alloc>;
  1005. friend class _Rope_const_iterator<_CharT,_Alloc>;
  1006. friend struct _Rope_RopeRep<_CharT,_Alloc>;
  1007. friend class _Rope_iterator_base<_CharT,_Alloc>;
  1008. friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
  1009. friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
  1010. friend struct _Rope_RopeSubstring<_CharT,_Alloc>;
  1011. _STLP_DECLARE_RANDOM_ACCESS_REVERSE_ITERATORS;
  1012. protected:
  1013. typedef _CharT* _Cstrptr;
  1014. static _CharT _S_empty_c_str[1];
  1015. enum { _S_copy_max = 23 };
  1016. // For strings shorter than _S_copy_max, we copy to
  1017. // concatenate.
  1018. typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep;
  1019. typedef typename _RopeRep::_IsBasicCharType _IsBasicCharType;
  1020. public:
  1021. _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  1022. typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type allocator_type;
  1023. public:
  1024. // The only data member of a rope:
  1025. _STLP_PRIV _STLP_alloc_proxy<_RopeRep*, _CharT, allocator_type> _M_tree_ptr;
  1026. public:
  1027. allocator_type get_allocator() const { return allocator_type(_M_tree_ptr); }
  1028. public:
  1029. typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcatenation;
  1030. typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
  1031. typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
  1032. typedef _Rope_RopeSubstring<_CharT,_Alloc> _RopeSubstring;
  1033. // Retrieve a character at the indicated position.
  1034. static _CharT _S_fetch(_RopeRep* __r, size_type __pos);
  1035. // Obtain a pointer to the character at the indicated position.
  1036. // The pointer can be used to change the character.
  1037. // If such a pointer cannot be produced, as is frequently the
  1038. // case, 0 is returned instead.
  1039. // (Returns nonzero only if all nodes in the path have a refcount
  1040. // of 1.)
  1041. static _CharT* _S_fetch_ptr(_RopeRep* __r, size_type __pos);
  // Convenience forwarders to the representation's reference
  // counting (see _Rope_RopeRep for the null-handling contract).
  static void _S_unref(_RopeRep* __t) {
    _RopeRep::_S_unref(__t);
  }
  static void _S_ref(_RopeRep* __t) {
    _RopeRep::_S_ref(__t);
  }
  1048. typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
  1049. // _Result is counted in refcount.
  1050. static _RopeRep* _S_substring(_RopeRep* __base,
  1051. size_t __start, size_t __endp1);
  1052. static _RopeRep* _S_concat_char_iter(_RopeRep* __r,
  1053. const _CharT* __iter, size_t __slen);
  1054. // Concatenate rope and char ptr, copying __s.
  1055. // Should really take an arbitrary iterator.
  1056. // Result is counted in refcount.
  1057. static _RopeRep* _S_destr_concat_char_iter(_RopeRep* __r,
  1058. const _CharT* __iter, size_t __slen);
  1059. // As above, but one reference to __r is about to be
  1060. // destroyed. Thus the pieces may be recycled if all
  1061. // relevent reference counts are 1.
  1062. // General concatenation on _RopeRep. _Result
  1063. // has refcount of 1. Adjusts argument refcounts.
  1064. static _RopeRep* _S_concat_rep(_RopeRep* __left, _RopeRep* __right);
  1065. public:
#if defined (_STLP_MEMBER_TEMPLATES)
  template <class _CharConsumer>
#else
  typedef _Rope_char_consumer<_CharT> _CharConsumer;
#endif
  // Feed every leaf fragment of [__begin, __end) to the consumer
  // __c without flattening the rope.
  void apply_to_pieces(size_t __begin, size_t __end,
                       _CharConsumer& __c) const
  { _S_apply_to_pieces(__c, _M_tree_ptr._M_data, __begin, __end); }
  1074. protected:
  static size_t _S_rounded_up_size(size_t __n)
  { return _RopeRep::_S_rounded_up_size(__n); }
  // Allocate and construct a RopeLeaf using the supplied allocator.
  // Takes ownership of s instead of copying.
  // On constructor failure the raw node storage is returned to the
  // allocator before the exception propagates.
  static _RopeLeaf* _S_new_RopeLeaf(_CharT *__s,
                                    size_t _p_size, allocator_type __a) {
    _RopeLeaf* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
                                                _RopeLeaf).allocate(1);
    _STLP_TRY {
      _STLP_PLACEMENT_NEW(__space) _RopeLeaf(__s, _p_size, __a);
    }
    _STLP_UNWIND(_STLP_CREATE_ALLOCATOR(allocator_type,__a,
                                        _RopeLeaf).deallocate(__space, 1))
    return __space;
  }
  // Allocate + placement-construct a concatenation node over the two
  // subtrees (refcount bookkeeping is the caller's responsibility).
  static _RopeConcatenation* _S_new_RopeConcatenation(_RopeRep* __left, _RopeRep* __right,
                                                      allocator_type __a) {
    _RopeConcatenation* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
                                                         _RopeConcatenation).allocate(1);
    return _STLP_PLACEMENT_NEW(__space) _RopeConcatenation(__left, __right, __a);
  }
  // Node whose characters are produced lazily by __f; __d indicates
  // whether the producer is deleted with the node.
  static _RopeFunction* _S_new_RopeFunction(char_producer<_CharT>* __f,
                                            size_t _p_size, bool __d, allocator_type __a) {
    _RopeFunction* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
                                                    _RopeFunction).allocate(1);
    return _STLP_PLACEMENT_NEW(__space) _RopeFunction(__f, _p_size, __d, __a);
  }
  // Lazy substring node: __l characters of __b starting at __s.
  static _RopeSubstring* _S_new_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
                                              size_t __l, allocator_type __a) {
    _RopeSubstring* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
                                                     _RopeSubstring).allocate(1);
    return _STLP_PLACEMENT_NEW(__space) _RopeSubstring(__b, __s, __l, __a);
  }
  // Build a leaf from a caller-owned buffer by copying it into newly
  // allocated, null-terminated storage.  Returns 0 for empty input.
  static
  _RopeLeaf* _S_RopeLeaf_from_unowned_char_ptr(const _CharT *__s,
                                               size_t _p_size, allocator_type __a) {
    if (0 == _p_size) return 0;
    _CharT* __buf = _STLP_CREATE_ALLOCATOR(allocator_type,__a, _CharT).allocate(_S_rounded_up_size(_p_size));
    _STLP_PRIV __ucopy_n(__s, _p_size, __buf);
    _S_construct_null(__buf + _p_size);
    _STLP_TRY {
      return _S_new_RopeLeaf(__buf, _p_size, __a);
    }
    _STLP_UNWIND(_RopeRep::_S_free_string(__buf, _p_size, __a))
    _STLP_RET_AFTER_THROW(0)
  }
  1121. // Concatenation of nonempty strings.
  1122. // Always builds a concatenation node.
  1123. // Rebalances if the result is too deep.
  1124. // Result has refcount 1.
  1125. // Does not increment left and right ref counts even though
  1126. // they are referenced.
  1127. static _RopeRep*
  1128. _S_tree_concat(_RopeRep* __left, _RopeRep* __right);
  1129. // Concatenation helper functions
  1130. static _RopeLeaf*
  1131. _S_leaf_concat_char_iter(_RopeLeaf* __r,
  1132. const _CharT* __iter, size_t __slen);
  1133. // Concatenate by copying leaf.
  1134. // should take an arbitrary iterator
  1135. // result has refcount 1.
  1136. static _RopeLeaf* _S_destr_leaf_concat_char_iter
  1137. (_RopeLeaf* __r, const _CharT* __iter, size_t __slen);
  1138. // A version that potentially clobbers __r if __r->_M_ref_count == 1.
  1139. // A helper function for exponentiating strings.
  1140. // This uses a nonstandard refcount convention.
  1141. // The result has refcount 0.
  1142. typedef _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;
  1143. #if !defined (__GNUC__) || (__GNUC__ < 3)
  1144. friend _Concat_fn;
  1145. #else
  1146. friend struct _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc>;
  1147. #endif
  1148. public:
  // Length of a null-terminated _CharT string (char_traits-based).
  static size_t _S_char_ptr_len(const _CharT* __s) {
    return char_traits<_CharT>::length(__s);
  }
public: /* for operators */
  // Wrap an existing representation; __t is adopted without an
  // additional reference (the caller's count transfers to the rope).
  rope(_RopeRep* __t, const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, __t) { }
  1155. private:
  1156. // Copy __r to the _CharT buffer.
  1157. // Returns __buffer + __r->_M_size._M_data.
  1158. // Assumes that buffer is uninitialized.
  1159. static _CharT* _S_flatten(_RopeRep* __r, _CharT* __buffer);
  1160. // Again, with explicit starting position and length.
  1161. // Assumes that buffer is uninitialized.
  1162. static _CharT* _S_flatten(_RopeRep* __r,
  1163. size_t __start, size_t __len,
  1164. _CharT* __buffer);
  1165. // fbp : HP aCC prohibits access to protected min_len from within static methods ( ?? )
  1166. public:
  1167. static const unsigned long _S_min_len[__ROPE_DEPTH_SIZE];
  1168. protected:
  // A tree of depth d is "balanced" when it holds at least
  // _S_min_len[d] characters; the almost/roughly variants relax the
  // bound by one / two depth levels.
  static bool _S_is_balanced(_RopeRep* __r)
  { return (__r->_M_size._M_data >= _S_min_len[__r->_M_depth]); }
  static bool _S_is_almost_balanced(_RopeRep* __r) {
    return (__r->_M_depth == 0 ||
            __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 1]);
  }
  static bool _S_is_roughly_balanced(_RopeRep* __r) {
    return (__r->_M_depth <= 1 ||
            __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 2]);
  }
  // Concatenate and record the balanced flag on the result.
  // Assumes the result is not empty.
  static _RopeRep* _S_concat_and_set_balanced(_RopeRep* __left,
                                              _RopeRep* __right) {
    _RopeRep* __result = _S_concat_rep(__left, __right);
    if (_S_is_balanced(__result)) __result->_M_is_balanced = true;
    return __result;
  }
  1186. // The basic rebalancing operation. Logically copies the
  1187. // rope. The result has refcount of 1. The client will
  1188. // usually decrement the reference count of __r.
  1189. // The result is within height 2 of balanced by the above
  1190. // definition.
  1191. static _RopeRep* _S_balance(_RopeRep* __r);
  1192. // Add all unbalanced subtrees to the forest of balanceed trees.
  1193. // Used only by balance.
  1194. static void _S_add_to_forest(_RopeRep*__r, _RopeRep** __forest);
  1195. // Add __r to forest, assuming __r is already balanced.
  1196. static void _S_add_leaf_to_forest(_RopeRep* __r, _RopeRep** __forest);
  1197. #ifdef _STLP_DEBUG
  1198. // Print to stdout, exposing structure
  1199. static void _S_dump(_RopeRep* __r, int __indent = 0);
  1200. #endif
  1201. // Return -1, 0, or 1 if __x < __y, __x == __y, or __x > __y resp.
  1202. static int _S_compare(const _RopeRep* __x, const _RopeRep* __y);
  1203. void _STLP_FUNCTION_THROWS _M_throw_out_of_range() const;
  // Replace the current tree with __r, dropping one reference to the
  // old tree.  __r's reference count is NOT incremented here: the
  // caller transfers an already-counted reference.
  void _M_reset(_RopeRep* __r) {
    //if (__r != _M_tree_ptr._M_data) {
    _S_unref(_M_tree_ptr._M_data);
    _M_tree_ptr._M_data = __r;
    //}
  }
public:
  // An empty rope is represented by a null tree pointer.
  bool empty() const { return 0 == _M_tree_ptr._M_data; }
  // Comparison member function.  This is public only for those
  // clients that need a ternary comparison.  Others
  // should use the comparison operators below.
  // Returns <0, 0 or >0 as *this is <, ==, > __y.
  int compare(const _Self& __y) const {
    return _S_compare(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data);
  }
  // Construct from a null-terminated string; the characters are
  // copied into a fresh leaf.
  rope(const _CharT* __s, const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_RopeLeaf_from_unowned_char_ptr(__s, _S_char_ptr_len(__s),__a))
  {}
  // Construct from the first __len characters of __s (embedded nulls
  // are preserved).
  rope(const _CharT* __s, size_t __len,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, (_S_RopeLeaf_from_unowned_char_ptr(__s, __len, __a)))
  {}
  // Should perhaps be templatized with respect to the iterator type
  // and use Sequence_buffer.  (It should perhaps use sequence_buffer
  // even now.)
  rope(const _CharT *__s, const _CharT *__e,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_RopeLeaf_from_unowned_char_ptr(__s, __e - __s, __a))
  {}
  // Construct from a rope iterator range: shares structure with the
  // source rope via a substring node instead of copying characters.
  rope(const const_iterator& __s, const const_iterator& __e,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
                                    __e._M_current_pos))
  {}
  rope(const iterator& __s, const iterator& __e,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
                                    __e._M_current_pos))
  {}
  // Single-character rope: build a one-character leaf.  If leaf
  // construction throws, the character buffer is freed on unwind.
  rope(_CharT __c, const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, (_RopeRep*)0) {
    _CharT* __buf = _M_tree_ptr.allocate(_S_rounded_up_size(1));
    _Copy_Construct(__buf, __c);
    _S_construct_null(__buf + 1);
    _STLP_TRY {
      _M_tree_ptr._M_data = _S_new_RopeLeaf(__buf, 1, __a);
    }
    _STLP_UNWIND(_RopeRep::_S_free_string(__buf, 1, __a))
  }
  // __n copies of __c.  Builds one leaf of up to 32 characters, then
  // "exponentiates" it (repeated concatenation via __power) to reach
  // __n / 32 repetitions, and appends a remainder leaf of __n % 32
  // characters.  This keeps the tree logarithmic in __n instead of
  // allocating __n characters in one flat buffer.
  rope(size_t __n, _CharT __c,
       const allocator_type& __a = allocator_type()):
    _M_tree_ptr(__a, (_RopeRep*)0) {
    if (0 == __n)
      return;
    rope<_CharT,_Alloc> __result;
# define  __exponentiate_threshold size_t(32)
    _RopeRep* __remainder;
    rope<_CharT,_Alloc> __remainder_rope;
    // gcc-2.7.2 bugs
    typedef _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;
    size_t __exponent = __n / __exponentiate_threshold;
    size_t __rest = __n % __exponentiate_threshold;
    if (0 == __rest) {
      __remainder = 0;
    } else {
      // Build the remainder leaf (__rest < threshold characters).
      _CharT* __rest_buffer = _M_tree_ptr.allocate(_S_rounded_up_size(__rest));
      uninitialized_fill_n(__rest_buffer, __rest, __c);
      _S_construct_null(__rest_buffer + __rest);
      _STLP_TRY {
        __remainder = _S_new_RopeLeaf(__rest_buffer, __rest, __a);
      }
      _STLP_UNWIND(_RopeRep::_S_free_string(__rest_buffer, __rest, __a))
    }
    __remainder_rope._M_tree_ptr._M_data = __remainder;
    if (__exponent != 0) {
      // Build the base leaf of exactly threshold characters, then
      // raise it to __exponent via repeated concatenation.
      _CharT* __base_buffer = _M_tree_ptr.allocate(_S_rounded_up_size(__exponentiate_threshold));
      _RopeLeaf* __base_leaf;
      rope<_CharT,_Alloc> __base_rope;
      uninitialized_fill_n(__base_buffer, __exponentiate_threshold, __c);
      _S_construct_null(__base_buffer + __exponentiate_threshold);
      _STLP_TRY {
        __base_leaf = _S_new_RopeLeaf(__base_buffer,
                                      __exponentiate_threshold, __a);
      }
      _STLP_UNWIND(_RopeRep::_S_free_string(__base_buffer,
                                            __exponentiate_threshold, __a))
      __base_rope._M_tree_ptr._M_data = __base_leaf;
      if (1 == __exponent) {
        __result = __base_rope;
        // One each for base_rope and __result
        //_STLP_ASSERT(2 == __result._M_tree_ptr._M_data->_M_ref_count)
      } else {
        __result = _STLP_PRIV __power(__base_rope, __exponent, _Concat_fn());
      }
      if (0 != __remainder) {
        __result += __remainder_rope;
      }
    } else {
      __result = __remainder_rope;
    }
    // Adopt the result tree; take an extra reference because
    // __result (a local rope) will drop its own on destruction.
    _M_tree_ptr._M_data = __result._M_tree_ptr._M_data;
    _M_tree_ptr._M_data->_M_ref_nonnil();
# undef __exponentiate_threshold
  }
  // Empty rope.
  rope(const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, (_RopeRep*)0) {}
  // Construct a rope from a function that can compute its members;
  // __delete_fn indicates whether the node owns (and later deletes)
  // the producer.
  rope(char_producer<_CharT> *__fn, size_t __len, bool __delete_fn,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, (_RopeRep*)0) {
    _M_tree_ptr._M_data = (0 == __len) ?
      0 : _S_new_RopeFunction(__fn, __len, __delete_fn, __a);
  }
  // Copy construction shares the (immutable) tree and bumps its
  // reference count; no characters are copied.
  rope(const _Self& __x)
    : _M_tree_ptr(__x._M_tree_ptr, __x._M_tree_ptr._M_data) {
    _S_ref(_M_tree_ptr._M_data);
  }
  // Move construction steals the source's tree, leaving it empty.
  rope(__move_source<_Self> __src)
    : _M_tree_ptr(__src.get()._M_tree_ptr, __src.get()._M_tree_ptr._M_data) {
    __src.get()._M_tree_ptr._M_data = 0;
  }
  ~rope() {
    _S_unref(_M_tree_ptr._M_data);
  }
  // Assignment: ref the source tree first, then _M_reset drops our
  // old reference — safe under self-assignment.
  _Self& operator=(const _Self& __x) {
    _STLP_ASSERT(get_allocator() == __x.get_allocator())
    _S_ref(__x._M_tree_ptr._M_data);
    _M_reset(__x._M_tree_ptr._M_data);
    return *this;
  }
  void clear() {
    _S_unref(_M_tree_ptr._M_data);
    _M_tree_ptr._M_data = 0;
  }
  // Append one character (may recycle the tree if uniquely owned).
  void push_back(_CharT __x) {
    _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, &__x, 1));
  }
  // Drop the last character by re-rooting at substring [0, size-1).
  // Precondition: the rope is non-empty (the tree pointer is
  // dereferenced without a null check).
  void pop_back() {
    _RopeRep* __old = _M_tree_ptr._M_data;
    _M_tree_ptr._M_data =
      _S_substring(_M_tree_ptr._M_data, 0, _M_tree_ptr._M_data->_M_size._M_data - 1);
    _S_unref(__old);
  }
  // Precondition: non-empty rope.
  _CharT back() const {
    return _S_fetch(_M_tree_ptr._M_data, _M_tree_ptr._M_data->_M_size._M_data - 1);
  }
  // Prepend one character: build a one-char leaf and concatenate it
  // in front.  On failure the new leaf is released on unwind.
  void push_front(_CharT __x) {
    _RopeRep* __old = _M_tree_ptr._M_data;
    _RopeRep* __left =
      _S_RopeLeaf_from_unowned_char_ptr(&__x, 1, _M_tree_ptr);
    _STLP_TRY {
      _M_tree_ptr._M_data = _S_concat_rep(__left, _M_tree_ptr._M_data);
      _S_unref(__old);
      _S_unref(__left);
    }
    _STLP_UNWIND(_S_unref(__left))
  }
  // Precondition: non-empty rope.
  void pop_front() {
    _RopeRep* __old = _M_tree_ptr._M_data;
    _M_tree_ptr._M_data = _S_substring(_M_tree_ptr._M_data, 1, _M_tree_ptr._M_data->_M_size._M_data);
    _S_unref(__old);
  }
  _CharT front() const {
    return _S_fetch(_M_tree_ptr._M_data, 0);
  }
  // Rebuild the tree to within the balance bound (see _S_balance).
  void balance() {
    _RopeRep* __old = _M_tree_ptr._M_data;
    _M_tree_ptr._M_data = _S_balance(_M_tree_ptr._M_data);
    _S_unref(__old);
  }
  // Flatten the whole rope into __buffer, which must hold size()
  // characters; existing elements there are destroyed first.
  void copy(_CharT* __buffer) const {
    _STLP_STD::_Destroy_Range(__buffer, __buffer + size());
    _S_flatten(_M_tree_ptr._M_data, __buffer);
  }
  1377. /*
  1378. * This is the copy function from the standard, but
  1379. * with the arguments reordered to make it consistent with the
  1380. * rest of the interface.
  1381. * Note that this guaranteed not to compile if the draft standard
  1382. * order is assumed.
  1383. */
  1384. size_type copy(size_type __pos, size_type __n, _CharT* __buffer) const {
  1385. size_t _p_size = size();
  1386. size_t __len = (__pos + __n > _p_size? _p_size - __pos : __n);
  1387. _STLP_STD::_Destroy_Range(__buffer, __buffer + __len);
  1388. _S_flatten(_M_tree_ptr._M_data, __pos, __len, __buffer);
  1389. return __len;
  1390. }
# ifdef _STLP_DEBUG
  // Print to stdout, exposing structure.  May be useful for
  // performance debugging.
  void dump() {
    _S_dump(_M_tree_ptr._M_data);
  }
# endif
  // Convert to 0 terminated string in new allocated memory.
  // Embedded 0s in the input do not terminate the copy.
  const _CharT* c_str() const;
  // As above, but also use the flattened representation as the
  // the new rope representation.
  const _CharT* replace_with_c_str();
  // Reclaim memory for the c_str generated flattened string.
  // Intentionally undocumented, since it's hard to say when this
  // is safe for multiple threads.
  void delete_c_str () {
    if (0 == _M_tree_ptr._M_data) return;
    // When the root is a leaf whose own character array doubles as
    // the cached C string, there is nothing separate to free.
    if (_RopeRep::_S_leaf == _M_tree_ptr._M_data->_M_tag &&
        ((_RopeLeaf*)_M_tree_ptr._M_data)->_M_data ==
        _M_tree_ptr._M_data->_M_c_string) {
      // Representation shared
      return;
    }
    _M_tree_ptr._M_data->_M_free_c_string();
    _M_tree_ptr._M_data->_M_c_string = 0;
  }
  1418. _CharT operator[] (size_type __pos) const {
  1419. return _S_fetch(_M_tree_ptr._M_data, __pos);
  1420. }
  1421. _CharT at(size_type __pos) const {
  1422. if (__pos >= size()) _M_throw_out_of_range();
  1423. return (*this)[__pos];
  1424. }
  1425. const_iterator begin() const {
  1426. return(const_iterator(_M_tree_ptr._M_data, 0));
  1427. }
  1428. // An easy way to get a const iterator from a non-const container.
  1429. const_iterator const_begin() const {
  1430. return(const_iterator(_M_tree_ptr._M_data, 0));
  1431. }
  1432. const_iterator end() const {
  1433. return(const_iterator(_M_tree_ptr._M_data, size()));
  1434. }
  1435. const_iterator const_end() const {
  1436. return(const_iterator(_M_tree_ptr._M_data, size()));
  1437. }
  1438. size_type size() const {
  1439. return(0 == _M_tree_ptr._M_data? 0 : _M_tree_ptr._M_data->_M_size._M_data);
  1440. }
  1441. size_type length() const {
  1442. return size();
  1443. }
  1444. size_type max_size() const {
  1445. return _S_min_len[__ROPE_MAX_DEPTH-1] - 1;
  1446. // Guarantees that the result can be sufficiently
  1447. // balanced. Longer ropes will probably still work,
  1448. // but it's harder to make guarantees.
  1449. }
  1450. const_reverse_iterator rbegin() const {
  1451. return const_reverse_iterator(end());
  1452. }
  1453. const_reverse_iterator const_rbegin() const {
  1454. return const_reverse_iterator(end());
  1455. }
  1456. const_reverse_iterator rend() const {
  1457. return const_reverse_iterator(begin());
  1458. }
  1459. const_reverse_iterator const_rend() const {
  1460. return const_reverse_iterator(begin());
  1461. }
  1462. // The symmetric cases are intentionally omitted, since they're presumed
  1463. // to be less common, and we don't handle them as well.
  1464. // The following should really be templatized.
  1465. // The first argument should be an input iterator or
  1466. // forward iterator with value_type _CharT.
  // Append __n characters from __iter; may destructively reuse the
  // current tree when uniquely referenced.
  _Self& append(const _CharT* __iter, size_t __n) {
    _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, __iter, __n));
    return *this;
  }
  // Append a null-terminated string.
  _Self& append(const _CharT* __c_string) {
    size_t __len = _S_char_ptr_len(__c_string);
    append(__c_string, __len);
    return *this;
  }
  // Append the half-open character range [__s, __e).
  _Self& append(const _CharT* __s, const _CharT* __e) {
    _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, __s, __e - __s));
    return *this;
  }
  // Append a rope iterator range: shares structure via a substring
  // node instead of copying characters.  Both iterators must refer
  // to the same rope.
  _Self& append(const_iterator __s, const_iterator __e) {
    _STLP_ASSERT(__s._M_root == __e._M_root)
    _STLP_ASSERT(get_allocator() == __s._M_root->get_allocator())
    _Self_destruct_ptr __appendee(_S_substring(__s._M_root, __s._M_current_pos, __e._M_current_pos));
    _M_reset(_S_concat_rep(_M_tree_ptr._M_data, (_RopeRep*)__appendee));
    return *this;
  }
  // Append a single character.
  _Self& append(_CharT __c) {
    _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, &__c, 1));
    return *this;
  }
  _Self& append() { return append(_CharT()); }  // XXX why?
  // Append another rope by structure sharing (no character copies).
  _Self& append(const _Self& __y) {
    _STLP_ASSERT(__y.get_allocator() == get_allocator())
    _M_reset(_S_concat_rep(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data))
;
    return *this;
  }
  // Append __n copies of __c (built via the fill constructor).
  _Self& append(size_t __n, _CharT __c) {
    rope<_CharT,_Alloc> __last(__n, __c);
    return append(__last);
  }
  // O(1): exchanges tree pointers (and allocators via the proxy).
  void swap(_Self& __b) {
    _M_tree_ptr.swap(__b._M_tree_ptr);
  }
  1504. protected:
  1505. // Result is included in refcount.
  // Build a tree equal to __old with [__pos1, __pos2) replaced by
  // __r (or simply removed when __r is null).  Implemented as
  // substring(left) + __r + substring(right); no characters are
  // copied.  Result is included in refcount.
  static _RopeRep* replace(_RopeRep* __old, size_t __pos1,
                           size_t __pos2, _RopeRep* __r) {
    if (0 == __old) { _S_ref(__r); return __r; }
    _Self_destruct_ptr __left(_S_substring(__old, 0, __pos1));
    _Self_destruct_ptr __right(_S_substring(__old, __pos2, __old->_M_size._M_data));
    _STLP_MPWFIX_TRY    //*TY 06/01/2000 -
    _RopeRep* __result;
    if (0 == __r) {
      __result = _S_concat_rep(__left, __right);
    } else {
      _STLP_ASSERT(__old->get_allocator() == __r->get_allocator())
      _Self_destruct_ptr __left_result(_S_concat_rep(__left, __r));
      __result = _S_concat_rep(__left_result, __right);
    }
    return __result;
    _STLP_MPWFIX_CATCH  //*TY 06/01/2000 -
  }
  1523. public:
// Insert rope __r before position __p.
void insert(size_t __p, const _Self& __r) {
  if (__p > size()) _M_throw_out_of_range();
  _STLP_ASSERT(get_allocator() == __r.get_allocator())
  _M_reset(replace(_M_tree_ptr._M_data, __p, __p, __r._M_tree_ptr._M_data));
}
// Insert __n copies of __c before position __p.
void insert(size_t __p, size_t __n, _CharT __c) {
  rope<_CharT,_Alloc> __r(__n,__c);
  insert(__p, __r);
}
// Insert __n characters from the array __i before position __p.
void insert(size_t __p, const _CharT* __i, size_t __n) {
  if (__p > size()) _M_throw_out_of_range();
  _Self_destruct_ptr __left(_S_substring(_M_tree_ptr._M_data, 0, __p));
  _Self_destruct_ptr __right(_S_substring(_M_tree_ptr._M_data, __p, size()));
  _Self_destruct_ptr __left_result(
    _S_concat_char_iter(__left, __i, __n));
  // _S_ destr_concat_char_iter should be safe here.
  // But as it stands it's probably not a win, since __left
  // is likely to have additional references.
  _M_reset(_S_concat_rep(__left_result, __right));
}
// Insert a NUL-terminated string before position __p.
void insert(size_t __p, const _CharT* __c_string) {
  insert(__p, __c_string, _S_char_ptr_len(__c_string));
}
// Insert a single character before position __p.
void insert(size_t __p, _CharT __c) {
  insert(__p, &__c, 1);
}
// Insert one default-constructed character before position __p.
void insert(size_t __p) {
  _CharT __c = _CharT();
  insert(__p, &__c, 1);
}
// Insert the characters in [__i, __j) before position __p.
void insert(size_t __p, const _CharT* __i, const _CharT* __j) {
  _Self __r(__i, __j);
  insert(__p, __r);
}
// Insert a const_iterator range before position __p.
void insert(size_t __p, const const_iterator& __i,
            const const_iterator& __j) {
  _Self __r(__i, __j);
  insert(__p, __r);
}
// Insert a mutable iterator range before position __p.
void insert(size_t __p, const iterator& __i,
            const iterator& __j) {
  _Self __r(__i, __j);
  insert(__p, __r);
}
// (position, length) versions of replace operations:
// Replace the __n characters starting at __p with rope __r.
void replace(size_t __p, size_t __n, const _Self& __r) {
  if (__p > size()) _M_throw_out_of_range();
  _M_reset(replace(_M_tree_ptr._M_data, __p, __p + __n, __r._M_tree_ptr._M_data));
}
// Replace with __i_len characters taken from the array __i.
void replace(size_t __p, size_t __n,
             const _CharT* __i, size_t __i_len) {
  _Self __r(__i, __i_len);
  replace(__p, __n, __r);
}
// Replace with the single character __c.
void replace(size_t __p, size_t __n, _CharT __c) {
  _Self __r(__c);
  replace(__p, __n, __r);
}
// Replace with a NUL-terminated string.
void replace(size_t __p, size_t __n, const _CharT* __c_string) {
  _Self __r(__c_string);
  replace(__p, __n, __r);
}
// Replace with the characters in [__i, __j).
void replace(size_t __p, size_t __n,
             const _CharT* __i, const _CharT* __j) {
  _Self __r(__i, __j);
  replace(__p, __n, __r);
}
// Replace with a const_iterator range.
void replace(size_t __p, size_t __n,
             const const_iterator& __i, const const_iterator& __j) {
  _Self __r(__i, __j);
  replace(__p, __n, __r);
}
// Replace with a mutable iterator range.
void replace(size_t __p, size_t __n,
             const iterator& __i, const iterator& __j) {
  _Self __r(__i, __j);
  replace(__p, __n, __r);
}
// Single character variants:
// Overwrite the character at __p in place through a mutable iterator
// (a _Rope_char_ref_proxy assignment).
void replace(size_t __p, _CharT __c) {
  if (__p > size()) _M_throw_out_of_range();
  iterator __i(this, __p);
  *__i = __c;
}
// The remaining variants replace exactly one character at __p with the
// given replacement content (which may be longer than one character).
void replace(size_t __p, const _Self& __r) {
  replace(__p, 1, __r);
}
void replace(size_t __p, const _CharT* __i, size_t __i_len) {
  replace(__p, 1, __i, __i_len);
}
void replace(size_t __p, const _CharT* __c_string) {
  replace(__p, 1, __c_string);
}
void replace(size_t __p, const _CharT* __i, const _CharT* __j) {
  replace(__p, 1, __i, __j);
}
void replace(size_t __p, const const_iterator& __i,
             const const_iterator& __j) {
  replace(__p, 1, __i, __j);
}
void replace(size_t __p, const iterator& __i,
             const iterator& __j) {
  replace(__p, 1, __i, __j);
}
// Erase, (position, size) variant.
// Removes the __n characters starting at position __p.
void erase(size_t __p, size_t __n) {
  if (__p > size()) _M_throw_out_of_range();
  _M_reset(replace(_M_tree_ptr._M_data, __p, __p + __n, 0));
}
  1632. // Erase, single character
  1633. void erase(size_t __p) {
  1634. erase(__p, __p + 1);
  1635. }
// Insert, iterator variants.
// Each forwards to the corresponding (position, ...) overload above and
// returns the insertion-position iterator unchanged.
iterator insert(const iterator& __p, const _Self& __r)
{ insert(__p.index(), __r); return __p; }
iterator insert(const iterator& __p, size_t __n, _CharT __c)
{ insert(__p.index(), __n, __c); return __p; }
iterator insert(const iterator& __p, _CharT __c)
{ insert(__p.index(), __c); return __p; }
iterator insert(const iterator& __p )
{ insert(__p.index()); return __p; }
iterator insert(const iterator& __p, const _CharT* c_string)
{ insert(__p.index(), c_string); return __p; }
iterator insert(const iterator& __p, const _CharT* __i, size_t __n)
{ insert(__p.index(), __i, __n); return __p; }
iterator insert(const iterator& __p, const _CharT* __i,
                const _CharT* __j)
{ insert(__p.index(), __i, __j); return __p; }
iterator insert(const iterator& __p,
                const const_iterator& __i, const const_iterator& __j)
{ insert(__p.index(), __i, __j); return __p; }
iterator insert(const iterator& __p,
                const iterator& __i, const iterator& __j)
{ insert(__p.index(), __i, __j); return __p; }
// Replace, range variants.
// [__p, __q) selects the span to replace; these forward to the
// (position, length) overloads above.
void replace(const iterator& __p, const iterator& __q,
             const _Self& __r)
{ replace(__p.index(), __q.index() - __p.index(), __r); }
void replace(const iterator& __p, const iterator& __q, _CharT __c)
{ replace(__p.index(), __q.index() - __p.index(), __c); }
void replace(const iterator& __p, const iterator& __q,
             const _CharT* __c_string)
{ replace(__p.index(), __q.index() - __p.index(), __c_string); }
void replace(const iterator& __p, const iterator& __q,
             const _CharT* __i, size_t __n)
{ replace(__p.index(), __q.index() - __p.index(), __i, __n); }
void replace(const iterator& __p, const iterator& __q,
             const _CharT* __i, const _CharT* __j)
{ replace(__p.index(), __q.index() - __p.index(), __i, __j); }
void replace(const iterator& __p, const iterator& __q,
             const const_iterator& __i, const const_iterator& __j)
{ replace(__p.index(), __q.index() - __p.index(), __i, __j); }
void replace(const iterator& __p, const iterator& __q,
             const iterator& __i, const iterator& __j)
{ replace(__p.index(), __q.index() - __p.index(), __i, __j); }
// Replace, iterator variants.
// Single-position forms; these forward to the single-character
// replace overloads above.
void replace(const iterator& __p, const _Self& __r)
{ replace(__p.index(), __r); }
void replace(const iterator& __p, _CharT __c)
{ replace(__p.index(), __c); }
void replace(const iterator& __p, const _CharT* __c_string)
{ replace(__p.index(), __c_string); }
void replace(const iterator& __p, const _CharT* __i, size_t __n)
{ replace(__p.index(), __i, __n); }
void replace(const iterator& __p, const _CharT* __i, const _CharT* __j)
{ replace(__p.index(), __i, __j); }
void replace(const iterator& __p, const_iterator __i,
             const_iterator __j)
{ replace(__p.index(), __i, __j); }
void replace(const iterator& __p, iterator __i, iterator __j)
{ replace(__p.index(), __i, __j); }
// Iterator and range variants of erase
// Erase [__p, __q); returns an iterator at the erase position.
iterator erase(const iterator& __p, const iterator& __q) {
  size_t __p_index = __p.index();
  erase(__p_index, __q.index() - __p_index);
  return iterator(this, __p_index);
}
// Erase the single character at __p.
iterator erase(const iterator& __p) {
  size_t __p_index = __p.index();
  erase(__p_index, 1);
  return iterator(this, __p_index);
}
// Substring of length __len starting at __start.  NOTE: unlike
// basic_string::substr, the default length is 1 (a single character),
// not npos.
_Self substr(size_t __start, size_t __len = 1) const {
  if (__start > size()) _M_throw_out_of_range();
  return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start, __start + __len));
}
// Substring delimited by two mutable iterators.
_Self substr(iterator __start, iterator __end) const {
  return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()));
}
// Single-character substring at the iterator's position.
_Self substr(iterator __start) const {
  size_t __pos = __start.index();
  return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
}
_Self substr(const_iterator __start, const_iterator __end) const {
  // This might eventually take advantage of the cache in the
  // iterator.
  return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()));
}
// Single-character substring at the const_iterator's position.
rope<_CharT,_Alloc> substr(const_iterator __start) {
  size_t __pos = __start.index();
  return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
}
  1726. #include <stl/_string_npos.h>
// Find the first occurrence of rope __s at or after __pos; returns npos
// (or size() with the old SGI semantics) when not found.
size_type find(const _Self& __s, size_type __pos = 0) const {
  if (__pos >= size())
# ifndef _STLP_OLD_ROPE_SEMANTICS
    return npos;
# else
    return size();
# endif
  size_type __result_pos;
  // Plain linear search over the character sequence.
  const_iterator __result = search(const_begin() + (ptrdiff_t)__pos, const_end(), __s.begin(), __s.end() );
  __result_pos = __result.index();
# ifndef _STLP_OLD_ROPE_SEMANTICS
  if (__result_pos == size()) __result_pos = npos;
# endif
  return __result_pos;
}
// Single-character find; defined out of line (see <stl/_rope.c>).
size_type find(_CharT __c, size_type __pos = 0) const;
// C-string find.  NOTE(review): unlike the rope overload above, this
// one does not range-check __pos against size() -- confirm callers
// never pass __pos > size().
size_type find(const _CharT* __s, size_type __pos = 0) const {
  size_type __result_pos;
  const_iterator __result = search(const_begin() + (ptrdiff_t)__pos, const_end(),
                                   __s, __s + _S_char_ptr_len(__s));
  __result_pos = __result.index();
# ifndef _STLP_OLD_ROPE_SEMANTICS
  if (__result_pos == size()) __result_pos = npos;
# endif
  return __result_pos;
}
// Mutable iterator accessors.  These are deliberately named rather than
// overloading begin()/end(): mutable rope iterators are expensive, so
// callers must ask for them explicitly (unless __STD_STUFF is defined).
iterator mutable_begin() {
  return(iterator(this, 0));
}
iterator mutable_end() {
  return(iterator(this, size()));
}
reverse_iterator mutable_rbegin() {
  return reverse_iterator(mutable_end());
}
reverse_iterator mutable_rend() {
  return reverse_iterator(mutable_begin());
}
// A writable proxy reference to the character at __pos.
reference mutable_reference_at(size_type __pos) {
  return reference(this, __pos);
}
// When __STD_STUFF is defined, expose a mutable basic_string-like
// interface (operator[], at, resize, ...); otherwise begin()/end()
// return cheap const iterators only.
# ifdef __STD_STUFF
reference operator[] (size_type __pos) {
  return reference(this, __pos);
}
// Range-checked element access.
reference at(size_type __pos) {
  if (__pos >= size()) _M_throw_out_of_range();
  return (*this)[__pos];
}
// Capacity operations are no-ops: ropes manage storage internally.
void resize(size_type, _CharT) {}
void resize(size_type) {}
void reserve(size_type = 0) {}
size_type capacity() const {
  return max_size();
}
// Stuff below this line is dangerous because it's error prone.
// I would really like to get rid of it.
// copy function with funny arg ordering.
size_type copy(_CharT* __buffer, size_type __n,
               size_type __pos = 0) const {
  return copy(__pos, __n, __buffer);
}
iterator end() { return mutable_end(); }
iterator begin() { return mutable_begin(); }
reverse_iterator rend() { return mutable_rend(); }
reverse_iterator rbegin() { return mutable_rbegin(); }
# else
const_iterator end() { return const_end(); }
const_iterator begin() { return const_begin(); }
const_reverse_iterator rend() { return const_rend(); }
const_reverse_iterator rbegin() { return const_rbegin(); }
# endif
  1799. }; //class rope
#if !defined (_STLP_STATIC_CONST_INIT_BUG)
#  if defined (__GNUC__) && (__GNUC__ == 2) && (__GNUC_MINOR__ == 96)
// gcc 2.96 requires this out-of-class definition for the static const
// npos member even though it has an in-class initializer.
template <class _CharT, class _Alloc>
const size_t rope<_CharT, _Alloc>::npos = ~(size_t) 0;
#  endif
#endif
// Random access through a const iterator: fetch the character __n
// positions past the iterator without moving it.
template <class _CharT, class _Alloc>
inline _CharT
_Rope_const_iterator< _CharT, _Alloc>::operator[](size_t __n)
{ return rope<_CharT,_Alloc>::_S_fetch(this->_M_root, this->_M_current_pos + __n); }

// Const iterators are equal only when they refer to the same position
// of the same rope representation.
template <class _CharT, class _Alloc>
inline bool operator== (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                        const _Rope_const_iterator<_CharT,_Alloc>& __y) {
  return (__x._M_current_pos == __y._M_current_pos &&
          __x._M_root == __y._M_root);
}

// Ordering compares positions only; meaningful when both iterators
// refer to the same rope.
template <class _CharT, class _Alloc>
inline bool operator< (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                       const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return (__x._M_current_pos < __y._M_current_pos); }
#ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE
// Remaining relational operators, all derived from == and < above.
template <class _CharT, class _Alloc>
inline bool operator!= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                        const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return !(__x == __y); }

template <class _CharT, class _Alloc>
inline bool operator> (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                       const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return __y < __x; }

template <class _CharT, class _Alloc>
inline bool operator<= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                        const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return !(__y < __x); }

template <class _CharT, class _Alloc>
inline bool operator>= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                        const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return !(__x < __y); }
#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
// Iterator difference: distance between the two positions.
template <class _CharT, class _Alloc>
inline ptrdiff_t operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x,
                           const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos; }

#if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000 // dwa 8/21/97 - "ambiguous access to overloaded function" bug.
// Iterator arithmetic: produce a new iterator __n positions away.
template <class _CharT, class _Alloc>
inline _Rope_const_iterator<_CharT,_Alloc>
operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n)
{ return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos - __n); }
# endif

template <class _CharT, class _Alloc>
inline _Rope_const_iterator<_CharT,_Alloc>
operator+(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n)
{ return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos + __n); }

template <class _CharT, class _Alloc>
inline _Rope_const_iterator<_CharT,_Alloc>
operator+(ptrdiff_t __n, const _Rope_const_iterator<_CharT,_Alloc>& __x)
{ return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos + __n); }
// Mutable iterators are equal only at the same position in the same
// owning rope (note: compares _M_root_rope, not the raw tree root).
template <class _CharT, class _Alloc>
inline bool operator== (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y) {
  return (__x._M_current_pos == __y._M_current_pos &&
          __x._M_root_rope == __y._M_root_rope);
}

// Ordering compares positions only.
template <class _CharT, class _Alloc>
inline bool operator< (const _Rope_iterator<_CharT,_Alloc>& __x,
                       const _Rope_iterator<_CharT,_Alloc>& __y)
{ return (__x._M_current_pos < __y._M_current_pos); }
#if defined (_STLP_USE_SEPARATE_RELOPS_NAMESPACE)
// Remaining relational operators, all derived from == and < above.
template <class _CharT, class _Alloc>
inline bool operator!= (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y)
{ return !(__x == __y); }

template <class _CharT, class _Alloc>
inline bool operator> (const _Rope_iterator<_CharT,_Alloc>& __x,
                       const _Rope_iterator<_CharT,_Alloc>& __y)
{ return __y < __x; }

template <class _CharT, class _Alloc>
inline bool operator<= (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y)
{ return !(__y < __x); }

template <class _CharT, class _Alloc>
inline bool operator>= (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y)
{ return !(__x < __y); }
#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
// Iterator difference: distance between the two positions.
template <class _CharT, class _Alloc>
inline ptrdiff_t operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
                           const _Rope_iterator<_CharT,_Alloc>& __y)
{ return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos; }

#if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000 // dwa 8/21/97 - "ambiguous access to overloaded function" bug.
// Iterator arithmetic: produce a new iterator __n positions away,
// bound to the same owning rope.
template <class _CharT, class _Alloc>
inline _Rope_iterator<_CharT,_Alloc>
operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
          ptrdiff_t __n) {
  return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos - __n);
}
# endif

template <class _CharT, class _Alloc>
inline _Rope_iterator<_CharT,_Alloc>
operator+(const _Rope_iterator<_CharT,_Alloc>& __x,
          ptrdiff_t __n) {
  return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos + __n);
}

template <class _CharT, class _Alloc>
inline _Rope_iterator<_CharT,_Alloc>
operator+(ptrdiff_t __n, const _Rope_iterator<_CharT,_Alloc>& __x) {
  return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos + __n);
}
// Concatenate two ropes.  Shares tree structure: no characters are
// copied, so this is cheap even for long ropes.
template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>
operator+ (const rope<_CharT,_Alloc>& __left,
           const rope<_CharT,_Alloc>& __right) {
  _STLP_ASSERT(__left.get_allocator() == __right.get_allocator())
  return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_rep(__left._M_tree_ptr._M_data, __right._M_tree_ptr._M_data));
  // Inlining this should make it possible to keep __left and __right in registers.
}

template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>&
operator+= (rope<_CharT,_Alloc>& __left,
            const rope<_CharT,_Alloc>& __right) {
  __left.append(__right);
  return __left;
}

// rope + NUL-terminated string.
template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>
operator+ (const rope<_CharT,_Alloc>& __left,
           const _CharT* __right) {
  size_t __rlen = rope<_CharT,_Alloc>::_S_char_ptr_len(__right);
  return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_char_iter(__left._M_tree_ptr._M_data, __right, __rlen));
}

template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>&
operator+= (rope<_CharT,_Alloc>& __left,
            const _CharT* __right) {
  __left.append(__right);
  return __left;
}

// rope + single character.
template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>
operator+ (const rope<_CharT,_Alloc>& __left, _CharT __right) {
  return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_char_iter(__left._M_tree_ptr._M_data, &__right, 1));
}

template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>&
operator+= (rope<_CharT,_Alloc>& __left, _CharT __right) {
  __left.append(__right);
  return __left;
}
// Lexicographic comparison, delegated to rope::compare.
template <class _CharT, class _Alloc>
inline bool
operator< (const rope<_CharT,_Alloc>& __left,
           const rope<_CharT,_Alloc>& __right) {
  return __left.compare(__right) < 0;
}

template <class _CharT, class _Alloc>
inline bool
operator== (const rope<_CharT,_Alloc>& __left,
            const rope<_CharT,_Alloc>& __right) {
  return __left.compare(__right) == 0;
}
#ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE
// Remaining rope relational operators, derived from == and < above.
template <class _CharT, class _Alloc>
inline bool
operator!= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return !(__x == __y);
}

template <class _CharT, class _Alloc>
inline bool
operator> (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return __y < __x;
}

template <class _CharT, class _Alloc>
inline bool
operator<= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return !(__y < __x);
}

template <class _CharT, class _Alloc>
inline bool
operator>= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return !(__x < __y);
}

template <class _CharT, class _Alloc>
inline bool operator!= (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
                        const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
  return !(__x == __y);
}
#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */

// Char pointer proxies are equal when they designate the same position
// of the same rope.
template <class _CharT, class _Alloc>
inline bool operator== (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
                        const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
  return (__x._M_pos == __y._M_pos && __x._M_root == __y._M_root);
}
#if !defined (_STLP_USE_NO_IOSTREAMS)
// Stream output for ropes; defined out of line (see <stl/_rope.c>).
template<class _CharT, class _Traits, class _Alloc>
basic_ostream<_CharT, _Traits>& operator<< (basic_ostream<_CharT, _Traits>& __o,
                                            const rope<_CharT, _Alloc>& __r);
#endif
// Convenience typedefs for the common character types.
typedef rope<char, _STLP_DEFAULT_ALLOCATOR(char) > crope;
#if defined (_STLP_HAS_WCHAR_T)
typedef rope<wchar_t, _STLP_DEFAULT_ALLOCATOR(wchar_t) > wrope;
#endif

// Helpers exposing mutable_reference_at through a free function.
inline crope::reference __mutable_reference_at(crope& __c, size_t __i)
{ return __c.mutable_reference_at(__i); }

#if defined (_STLP_HAS_WCHAR_T)
inline wrope::reference __mutable_reference_at(wrope& __c, size_t __i)
{ return __c.mutable_reference_at(__i); }
#endif
// Free swap overloads forwarding to the O(1) member swap.  Without
// partial ordering of function templates only the concrete crope/wrope
// overloads can be provided.
#if defined (_STLP_FUNCTION_TMPL_PARTIAL_ORDER)
template <class _CharT, class _Alloc>
inline void swap(rope<_CharT,_Alloc>& __x, rope<_CharT,_Alloc>& __y)
{ __x.swap(__y); }
#else
inline void swap(crope& __x, crope& __y) { __x.swap(__y); }
# ifdef _STLP_HAS_WCHAR_T // dwa 8/21/97
inline void swap(wrope& __x, wrope& __y) { __x.swap(__y); }
# endif
#endif /* _STLP_FUNCTION_TMPL_PARTIAL_ORDER */
  2016. // Hash functions should probably be revisited later:
  2017. _STLP_TEMPLATE_NULL struct hash<crope> {
  2018. size_t operator()(const crope& __str) const {
  2019. size_t _p_size = __str.size();
  2020. if (0 == _p_size) return 0;
  2021. return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
  2022. }
  2023. };
#if defined (_STLP_HAS_WCHAR_T) // dwa 8/21/97
// Same cheap hash for wide-character ropes: first character, last
// character and length; the empty rope hashes to 0.
_STLP_TEMPLATE_NULL struct hash<wrope> {
  size_t operator()(const wrope& __str) const {
    size_t _p_size = __str.size();
    if (0 == _p_size) return 0;
    return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
  }
};
#endif
#if (!defined (_STLP_MSVC) || (_STLP_MSVC >= 1310))
// I couldn't get this to work with VC++
// Rotate specialized for rope iterators; implemented out of line
// (see <stl/_rope.c>).
template<class _CharT,class _Alloc>
#  if defined (__DMC__) && !defined (__PUT_STATIC_DATA_MEMBERS_HERE)
extern
#  endif
void _Rope_rotate(_Rope_iterator<_CharT, _Alloc> __first,
                  _Rope_iterator<_CharT, _Alloc> __middle,
                  _Rope_iterator<_CharT, _Alloc> __last);

// Overload of the generic rotate for crope iterators.
inline void rotate(_Rope_iterator<char, _STLP_DEFAULT_ALLOCATOR(char) > __first,
                   _Rope_iterator<char, _STLP_DEFAULT_ALLOCATOR(char) > __middle,
                   _Rope_iterator<char, _STLP_DEFAULT_ALLOCATOR(char) > __last)
{ _Rope_rotate(__first, __middle, __last); }
#endif
  2047. template <class _CharT, class _Alloc>
  2048. inline _Rope_char_ref_proxy<_CharT, _Alloc>::operator _CharT () const {
  2049. if (_M_current_valid) {
  2050. return _M_current;
  2051. } else {
  2052. return _My_rope::_S_fetch(_M_root->_M_tree_ptr._M_data, _M_pos);
  2053. }
  2054. }
#if defined (_STLP_CLASS_PARTIAL_SPECIALIZATION)
// Ropes are movable by the STLport move framework.
template <class _CharT, class _Alloc>
struct __move_traits<rope<_CharT, _Alloc> > {
  typedef __stlp_movable implemented;
  //Completness depends on the allocator:
  typedef typename __move_traits<_Alloc>::complete complete;
};
#endif
  2063. _STLP_END_NAMESPACE
  2064. #if !defined (_STLP_LINK_TIME_INSTANTIATION)
  2065. # include <stl/_rope.c>
  2066. #endif
  2067. #endif /* _STLP_INTERNAL_ROPE_H */
  2068. // Local Variables:
  2069. // mode:C++
  2070. // End: