
/project/jni/stlport/stlport/stl/_rope.h

https://github.com/aichunyu/FFPlayer
C++ Header | 2374 lines | 1723 code | 304 blank | 347 comment | 133 complexity | 4dc8c97b4cf11125c61b0022351b463f MD5


/*
 *
 * Copyright (c) 1996,1997
 * Silicon Graphics Computer Systems, Inc.
 *
 * Copyright (c) 1997
 * Moscow Center for SPARC Technology
 *
 * Copyright (c) 1999
 * Boris Fomitchev
 *
 * This material is provided "as is", with absolutely no warranty expressed
 * or implied. Any use is at your own risk.
 *
 * Permission to use or copy this software for any purpose is hereby granted
 * without fee, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 *
 */

/* NOTE: This is an internal header file, included by other STL headers.
 *   You should not attempt to use it directly.
 */

// rope<_CharT,_Alloc> is a sequence of _CharT.
// Ropes appear to be mutable, but update operations
// really copy enough of the data structure to leave the original
// valid.  Thus ropes can be logically copied by just copying
// a pointer value.

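// Illustrative sketch (not part of the original header): because updates
// copy only what they must, "copying" a rope is O(1).  Assuming only the
// public interface declared below:
//
//   rope<char> __a("hello, ");
//   rope<char> __b = __a;                    // copies a pointer, bumps a refcount
//   rope<char> __c = __a + rope<char>("world");
//   // __a and __b still denote "hello, "; __c shares that tree as its left child.
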
#ifndef _STLP_INTERNAL_ROPE_H
#define _STLP_INTERNAL_ROPE_H

#ifndef _STLP_INTERNAL_ALGOBASE_H
#  include <stl/_algobase.h>
#endif

#ifndef _STLP_IOSFWD
#  include <iosfwd>
#endif

#ifndef _STLP_INTERNAL_ALLOC_H
#  include <stl/_alloc.h>
#endif

#ifndef _STLP_INTERNAL_ITERATOR_H
#  include <stl/_iterator.h>
#endif

#ifndef _STLP_INTERNAL_ALGO_H
#  include <stl/_algo.h>
#endif

#ifndef _STLP_INTERNAL_FUNCTION_BASE_H
#  include <stl/_function_base.h>
#endif

#ifndef _STLP_INTERNAL_NUMERIC_H
#  include <stl/_numeric.h>
#endif

#ifndef _STLP_INTERNAL_HASH_FUN_H
#  include <stl/_hash_fun.h>
#endif

#ifndef _STLP_CHAR_TRAITS_H
#  include <stl/char_traits.h>
#endif

#ifndef _STLP_INTERNAL_THREADS_H
#  include <stl/_threads.h>
#endif

#ifdef _STLP_SGI_THREADS
#  include <mutex.h>
#endif

#ifndef _STLP_DONT_SUPPORT_REBIND_MEMBER_TEMPLATE
#  define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) (_Alloc_traits<_Tp,__atype>::create_allocator(__a))
#elif defined(__MRC__)||defined(__SC__)
#  define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create<_Tp,__atype>(__a,(_Tp*)0)
#else
#  define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create(__a,(_Tp*)0)
#endif

_STLP_BEGIN_NAMESPACE

// First a lot of forward declarations.  The standard seems to require
// much stricter "declaration before use" than many of the implementations
// that preceded it.
template<class _CharT, _STLP_DEFAULT_ALLOCATOR_SELECT(_CharT) > class rope;
template<class _CharT, class _Alloc> struct _Rope_RopeConcatenation;
template<class _CharT, class _Alloc> struct _Rope_RopeRep;
template<class _CharT, class _Alloc> struct _Rope_RopeLeaf;
template<class _CharT, class _Alloc> struct _Rope_RopeFunction;
template<class _CharT, class _Alloc> struct _Rope_RopeSubstring;
template<class _CharT, class _Alloc> class _Rope_iterator;
template<class _CharT, class _Alloc> class _Rope_const_iterator;
template<class _CharT, class _Alloc> class _Rope_char_ref_proxy;
template<class _CharT, class _Alloc> class _Rope_char_ptr_proxy;

_STLP_MOVE_TO_PRIV_NAMESPACE

// Some helpers, so we can use the power algorithm on ropes.
// See below for why this isn't local to the implementation.

// This uses a nonstandard refcount convention.
// The result has refcount 0.
template<class _CharT, class _Alloc>
struct _Rope_Concat_fn
  : public binary_function<rope<_CharT,_Alloc>, rope<_CharT,_Alloc>,
                           rope<_CharT,_Alloc> > {
  rope<_CharT,_Alloc> operator() (const rope<_CharT,_Alloc>& __x,
                                  const rope<_CharT,_Alloc>& __y) {
    return __x + __y;
  }
};

template <class _CharT, class _Alloc>
inline
rope<_CharT,_Alloc>
__identity_element(_Rope_Concat_fn<_CharT, _Alloc>)
{ return rope<_CharT,_Alloc>(); }

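// Illustrative note (not part of the original header): with operator+ as
// the monoid operation and the empty rope as its identity element, the
// generic power algorithm can build an n-fold repetition with O(log n)
// concatenations by repeated squaring.  A hand-rolled sketch of the idea:
//
//   rope<char> __acc;                        // identity element
//   rope<char> __sq = __base;
//   for (size_t __k = __n; __k != 0; __k >>= 1) {
//     if (__k & 1) __acc = __acc + __sq;     // each + adds one shared concat node
//     __sq = __sq + __sq;
//   }
//
// The rope(size_t, _CharT) constructor further below applies this scheme
// through these helpers.
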
_STLP_MOVE_TO_STD_NAMESPACE

// Store an eos
template <class _CharT>
inline void _S_construct_null_aux(_CharT *__p, const __true_type&)
{ *__p = 0; }

template <class _CharT>
inline void _S_construct_null_aux(_CharT *__p, const __false_type&)
{ _STLP_STD::_Construct(__p); }

template <class _CharT>
inline void _S_construct_null(_CharT *__p) {
  typedef typename _IsIntegral<_CharT>::_Ret _Char_Is_Integral;
  _S_construct_null_aux(__p, _Char_Is_Integral());
}

// char_producers are logically functions that generate a section of
// a string.  These can be converted to ropes.  The resulting rope
// invokes the char_producer on demand.  This allows, for example,
// files to be viewed as ropes without reading the entire file.
template <class _CharT>
class char_producer {
public:
  virtual ~char_producer() {}
  virtual void operator()(size_t __start_pos, size_t __len,
                          _CharT* __buffer) = 0;
  // Buffer should really be an arbitrary output iterator.
  // That way we could flatten directly into an ostream, etc.
  // This is thoroughly impossible, since iterator types don't
  // have runtime descriptions.
};
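
// Illustrative sketch (not part of the original header): a char_producer
// that serves characters from an open file on demand.  The FILE* handling
// is hypothetical; only the operator() signature comes from above.
//
//   class file_char_producer : public char_producer<char> {
//     FILE* _M_file;
//   public:
//     explicit file_char_producer(FILE* __f) : _M_file(__f) {}
//     virtual void operator()(size_t __start_pos, size_t __len,
//                             char* __buffer) {
//       fseek(_M_file, (long)__start_pos, SEEK_SET);
//       fread(__buffer, 1, __len, _M_file);
//     }
//   };
//
// Handed to the rope constructor that takes a char_producer (declared in
// the full header), this lets a rope present file contents without reading
// the whole file up front.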

// Sequence buffers:
//
// Sequence must provide an append operation that appends an
// array to the sequence.  Sequence buffers are useful only if
// appending an entire array is cheaper than appending element by element.
// This is true for many string representations.
// This should perhaps inherit from ostream<sequence::value_type>
// and be implemented correspondingly, so that they can be used
// for formatted output.  For the sake of portability, we don't do this yet.
//
// For now, sequence buffers behave as output iterators.  But they also
// behave a little like basic_ostringstream<sequence::value_type> and a
// little like containers.
template<class _Sequence
# if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
       defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
         , size_t _Buf_sz = 100
#   if defined(__sgi) && !defined(__GNUC__)
#   define __TYPEDEF_WORKAROUND
         ,class _V = typename _Sequence::value_type
#   endif /* __sgi */
# endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
         >
// The 3rd parameter works around a common compiler bug.
class sequence_buffer : public iterator <output_iterator_tag, void, void, void, void> {
public:
# ifndef __TYPEDEF_WORKAROUND
  typedef typename _Sequence::value_type value_type;
  typedef sequence_buffer<_Sequence
# if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
       defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
  , _Buf_sz
  > _Self;
# else /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
  > _Self;
  enum { _Buf_sz = 100};
# endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
  // # endif
# else /* __TYPEDEF_WORKAROUND */
  typedef _V value_type;
  typedef sequence_buffer<_Sequence, _Buf_sz, _V> _Self;
# endif /* __TYPEDEF_WORKAROUND */
protected:
  _Sequence* _M_prefix;
  value_type _M_buffer[_Buf_sz];
  size_t     _M_buf_count;
public:
  void flush() {
    _M_prefix->append(_M_buffer, _M_buffer + _M_buf_count);
    _M_buf_count = 0;
  }
  ~sequence_buffer() { flush(); }
  sequence_buffer() : _M_prefix(0), _M_buf_count(0) {}
  sequence_buffer(const _Self& __x) {
    _M_prefix = __x._M_prefix;
    _M_buf_count = __x._M_buf_count;
    copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
  }
  sequence_buffer(_Self& __x) {
    __x.flush();
    _M_prefix = __x._M_prefix;
    _M_buf_count = 0;
  }
  sequence_buffer(_Sequence& __s) : _M_prefix(&__s), _M_buf_count(0) {}
  _Self& operator= (_Self& __x) {
    __x.flush();
    _M_prefix = __x._M_prefix;
    _M_buf_count = 0;
    return *this;
  }
  _Self& operator= (const _Self& __x) {
    _M_prefix = __x._M_prefix;
    _M_buf_count = __x._M_buf_count;
    copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
    return *this;
  }
  void push_back(value_type __x) {
    if (_M_buf_count < _Buf_sz) {
      _M_buffer[_M_buf_count] = __x;
      ++_M_buf_count;
    } else {
      flush();
      _M_buffer[0] = __x;
      _M_buf_count = 1;
    }
  }
  void append(const value_type *__s, size_t __len) {
    if (__len + _M_buf_count <= _Buf_sz) {
      size_t __i = _M_buf_count;
      size_t __j = 0;
      for (; __j < __len; __i++, __j++) {
        _M_buffer[__i] = __s[__j];
      }
      _M_buf_count += __len;
    } else if (0 == _M_buf_count) {
      _M_prefix->append(__s, __s + __len);
    } else {
      flush();
      append(__s, __len);
    }
  }
  _Self& write(const value_type *__s, size_t __len) {
    append(__s, __len);
    return *this;
  }
  _Self& put(value_type __x) {
    push_back(__x);
    return *this;
  }
  _Self& operator=(const value_type& __rhs) {
    push_back(__rhs);
    return *this;
  }
  _Self& operator*() { return *this; }
  _Self& operator++() { return *this; }
  _Self& operator++(int) { return *this; }
};
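
// Illustrative sketch (not part of the original header): sequence_buffer
// batches single-character appends into one array append of up to _Buf_sz
// characters, which matters for ropes where every append may allocate
// tree nodes.  Assuming the rope interface declared below:
//
//   rope<char> __r;
//   sequence_buffer<rope<char> > __buf(__r);
//   for (char __ch = 'a'; __ch <= 'z'; ++__ch)
//     __buf.push_back(__ch);                // buffered; no rope update yet
//   __buf.flush();                          // one append of the whole chunk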

// The following should be treated as private, at least for now.
template<class _CharT>
class _Rope_char_consumer {
#if !defined (_STLP_MEMBER_TEMPLATES)
public:
  // Without member templates we have to use run-time parameterization.
  // The symmetry with char_producer is accidental and temporary.
  virtual ~_Rope_char_consumer() {}
  virtual bool operator()(const _CharT* __buffer, size_t __len) = 0;
#endif
};

//
// What follows should really be local to rope.  Unfortunately,
// that doesn't work, since it makes it impossible to define generic
// equality on rope iterators.  According to the draft standard, the
// template parameters for such an equality operator cannot be inferred
// from the occurrence of a member class as a parameter.
// (SGI compilers in fact allow this, but the result wouldn't be
// portable.)
// Similarly, some of the static member functions are member functions
// only to avoid polluting the global namespace, and to circumvent
// restrictions on type inference for template functions.
//

//
// The internal data structure for representing a rope.  This is
// private to the implementation.  A rope is really just a pointer
// to one of these.
//
// A few basic functions for manipulating this data structure
// are members of _RopeRep.  Most of the more complex algorithms
// are implemented as rope members.
//
// Some of the static member functions of _RopeRep have identically
// named functions in rope that simply invoke the _RopeRep versions.
//

template<class _CharT, class _Alloc>
struct _Rope_RopeRep
  : public _Refcount_Base
{
  typedef _Rope_RopeRep<_CharT, _Alloc> _Self;
public:
  //
  // GAB: 11/09/05
  //
  // "__ROPE_DEPTH_SIZE" is set to one more than the "__ROPE_MAX_DEPTH".
  // This was originally just an addition of "__ROPE_MAX_DEPTH + 1"
  // but this addition causes the sunpro compiler to complain about
  // multiple declarations during the initialization of "_S_min_len".
  // Changed to be a fixed value and the sunpro compiler appears to
  // be happy???
  //
#  define __ROPE_MAX_DEPTH  45
#  define __ROPE_DEPTH_SIZE 46 // __ROPE_MAX_DEPTH + 1
  enum { _S_max_rope_depth = __ROPE_MAX_DEPTH };
  enum _Tag {_S_leaf, _S_concat, _S_substringfn, _S_function};
  // Apparently needed by VC++
  // The data fields of leaves are allocated with some
  // extra space, to accommodate future growth and for basic
  // character types, to hold a trailing eos character.
  enum { _S_alloc_granularity = 8 };

  _Tag _M_tag:8;
  bool _M_is_balanced:8;

  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type allocator_type;

  allocator_type get_allocator() const { return allocator_type(_M_size);  }

  unsigned char _M_depth;
  _CharT* _STLP_VOLATILE _M_c_string;
  _STLP_PRIV _STLP_alloc_proxy<size_t, _CharT, allocator_type> _M_size;

# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeRep() : _Refcount_Base(1), _M_size(allocator_type(), 0) {}
# endif

  /* Flattened version of string, if needed.  */
  /* typically 0.                             */
  /* If it's not 0, then the memory is owned  */
  /* by this node.                            */
  /* In the case of a leaf, this may point to */
  /* the same memory as the data field.       */
  _Rope_RopeRep(_Tag __t, unsigned char __d, bool __b, size_t _p_size,
                allocator_type __a) :
    _Refcount_Base(1),
    _M_tag(__t), _M_is_balanced(__b), _M_depth(__d), _M_c_string(0), _M_size(__a, _p_size)
  { }

  typedef typename _AreSameUnCVTypes<_CharT, char>::_Ret _IsChar;
# ifdef _STLP_HAS_WCHAR_T
  typedef typename _AreSameUnCVTypes<_CharT, wchar_t>::_Ret _IsWCharT;
# else
  typedef __false_type _IsWCharT;
# endif

  typedef typename _Lor2<_IsChar, _IsWCharT>::_Ret _IsBasicCharType;

#if 0
  /* Please explain why this code is necessary before uncommenting it.
   * The problem with it is that the rope implementation expects
   * _S_rounded_up_size(n) to return a size > n, in order to store the
   * terminating null character. When the instantiation type is not char or
   * wchar_t this is not guaranteed, resulting in a memory overrun.
   */
  static size_t _S_rounded_up_size_aux(size_t __n, __true_type const& /*_IsBasicCharType*/) {
    // Allow slop for in-place expansion.
    return (__n + _S_alloc_granularity) & ~(_S_alloc_granularity - 1);
  }

  static size_t _S_rounded_up_size_aux(size_t __n, __false_type const& /*_IsBasicCharType*/) {
    // Allow slop for in-place expansion.
    return (__n + _S_alloc_granularity - 1) & ~(_S_alloc_granularity - 1);
  }
#endif
  // fbp : moved from RopeLeaf
  static size_t _S_rounded_up_size(size_t __n)
  //{ return _S_rounded_up_size_aux(__n, _IsBasicCharType()); }
  { return (__n + _S_alloc_granularity) & ~(_S_alloc_granularity - 1); }
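
  // Worked example (illustrative, not part of the original header): with
  // _S_alloc_granularity == 8 the result is always a multiple of 8 that is
  // strictly greater than __n, leaving room for the trailing eos:
  //   _S_rounded_up_size(5)  == (13 & ~7) == 8    // 5 chars + eos fit in 8
  //   _S_rounded_up_size(8)  == (16 & ~7) == 16   // 8 chars + eos fit in 16
  //   _S_rounded_up_size(16) == (24 & ~7) == 24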

  static void _S_free_string( _CharT* __s, size_t __len,
                             allocator_type __a) {
    _STLP_STD::_Destroy_Range(__s, __s + __len);
    //  This has to be a static member, so this gets a bit messy
#   ifndef _STLP_DONT_SUPPORT_REBIND_MEMBER_TEMPLATE
    __a.deallocate(__s, _S_rounded_up_size(__len));    //*ty 03/24/2001 - restored not to use __stl_alloc_rebind() since it is not defined under _STLP_MEMBER_TEMPLATE_CLASSES
#   else
    __stl_alloc_rebind (__a, (_CharT*)0).deallocate(__s, _S_rounded_up_size(__len));
#   endif
  }

  // Deallocate data section of a leaf.
  // This shouldn't be a member function.
  // But it's hard to do anything else at the
  // moment, because it's templatized w.r.t.
  // an allocator.
  // Does nothing if __GC is defined.
  void _M_free_c_string();
  void _M_free_tree();
  // Deallocate t. Assumes t is not 0.
  void _M_unref_nonnil() {
    if (_M_decr() == 0) _M_free_tree();
  }
  void _M_ref_nonnil() {
    _M_incr();
  }
  static void _S_unref(_Self* __t) {
    if (0 != __t) {
      __t->_M_unref_nonnil();
    }
  }
  static void _S_ref(_Self* __t) {
    if (0 != __t) __t->_M_incr();
  }
  //static void _S_free_if_unref(_Self* __t) {
  //  if (0 != __t && 0 == __t->_M_ref_count) __t->_M_free_tree();
  //}
};

template<class _CharT, class _Alloc>
struct _Rope_RopeLeaf : public _Rope_RopeRep<_CharT,_Alloc> {
public:
  _CharT* _M_data; /* Not necessarily 0 terminated. */
                                /* The allocated size is         */
                                /* _S_rounded_up_size(size), except */
                                /* in the GC case, in which it   */
                                /* doesn't matter.               */
private:
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  typedef typename _RopeRep::_IsBasicCharType _IsBasicCharType;
  void _M_init(__true_type const& /*_IsBasicCharType*/) {
    this->_M_c_string = _M_data;
  }
  void _M_init(__false_type const& /*_IsBasicCharType*/) {}

public:
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _RopeRep::allocator_type allocator_type;

  _Rope_RopeLeaf( _CharT* __d, size_t _p_size, allocator_type __a)
    : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_leaf, 0, true, _p_size, __a),
      _M_data(__d) {
    _STLP_ASSERT(_p_size > 0)
    _M_init(_IsBasicCharType());
  }

# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeLeaf() {}
  _Rope_RopeLeaf(const _Rope_RopeLeaf<_CharT, _Alloc>& ) {}
# endif

  // The constructor assumes that d has been allocated with
  // the proper allocator and the properly padded size.
  // In contrast, the destructor deallocates the data:
  ~_Rope_RopeLeaf() {
    if (_M_data != this->_M_c_string) {
      this->_M_free_c_string();
    }
    _RopeRep::_S_free_string(_M_data, this->_M_size._M_data, this->get_allocator());
  }
};

template<class _CharT, class _Alloc>
struct _Rope_RopeConcatenation : public _Rope_RopeRep<_CharT, _Alloc> {
private:
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;

public:
  _RopeRep* _M_left;
  _RopeRep* _M_right;
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _RopeRep::allocator_type allocator_type;
  _Rope_RopeConcatenation(_RopeRep* __l, _RopeRep* __r, allocator_type __a)
    : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_concat,
                                   (max)(__l->_M_depth, __r->_M_depth) + 1, false,
                                   __l->_M_size._M_data + __r->_M_size._M_data, __a), _M_left(__l), _M_right(__r)
  {}
# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeConcatenation() {}
  _Rope_RopeConcatenation(const _Rope_RopeConcatenation<_CharT, _Alloc>&) {}
# endif

  ~_Rope_RopeConcatenation() {
    this->_M_free_c_string();
    _M_left->_M_unref_nonnil();
    _M_right->_M_unref_nonnil();
  }
};

template <class _CharT, class _Alloc>
struct _Rope_RopeFunction : public _Rope_RopeRep<_CharT, _Alloc> {
private:
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
public:
  char_producer<_CharT>* _M_fn;
  /*
   * Char_producer is owned by the
   * rope and should be explicitly
   * deleted when the rope becomes
   * inaccessible.
   */
  bool _M_delete_when_done;
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeFunction() {}
  _Rope_RopeFunction(const _Rope_RopeFunction<_CharT, _Alloc>& ) {}
# endif

  _Rope_RopeFunction(char_producer<_CharT>* __f, size_t _p_size,
                     bool __d, allocator_type __a)
    : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_function, 0, true, _p_size, __a), _M_fn(__f)
    , _M_delete_when_done(__d)
  { _STLP_ASSERT(_p_size > 0) }

  ~_Rope_RopeFunction() {
    this->_M_free_c_string();
    if (_M_delete_when_done) {
      delete _M_fn;
    }
  }
};

/*
 * Substring results are usually represented using just
 * concatenation nodes.  But in the case of very long flat ropes
 * or ropes with a functional representation that isn't practical.
 * In that case, we represent the result as a special case of
 * RopeFunction, whose char_producer points back to the rope itself.
 * In all cases except repeated substring operations and
 * deallocation, we treat the result as a RopeFunction.
 */
template<class _CharT, class _Alloc>
struct _Rope_RopeSubstring : public char_producer<_CharT>, public _Rope_RopeFunction<_CharT,_Alloc> {
public:
  // XXX this whole class should be rewritten.
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  _RopeRep *_M_base;      // not 0
  size_t _M_start;
  /* virtual */ void operator()(size_t __start_pos, size_t __req_len,
                                _CharT* __buffer) {
    typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
    typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
    switch (_M_base->_M_tag) {
    case _RopeRep::_S_function:
    case _RopeRep::_S_substringfn:
      {
        char_producer<_CharT>* __fn =
          __STATIC_CAST(_RopeFunction*, _M_base)->_M_fn;
        _STLP_ASSERT(__start_pos + __req_len <= this->_M_size._M_data)
        _STLP_ASSERT(_M_start + this->_M_size._M_data <= _M_base->_M_size._M_data)
        (*__fn)(__start_pos + _M_start, __req_len, __buffer);
      }
      break;
    case _RopeRep::_S_leaf:
      {
        _CharT* __s =
          __STATIC_CAST(_RopeLeaf*, _M_base)->_M_data;
        _STLP_PRIV __ucopy_n(__s + __start_pos + _M_start, __req_len, __buffer);
      }
      break;
    default:
      _STLP_ASSERT(false)
        ;
    }
  }

  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _RopeRep::allocator_type allocator_type;

  _Rope_RopeSubstring(_RopeRep* __b, size_t __s, size_t __l, allocator_type __a)
    : _Rope_RopeFunction<_CharT,_Alloc>(this, __l, false, __a),
      _M_base(__b), _M_start(__s) {
    _STLP_ASSERT(__l > 0)
    _STLP_ASSERT(__s + __l <= __b->_M_size._M_data)
    _M_base->_M_ref_nonnil();
    this->_M_tag = _RopeRep::_S_substringfn;
  }
  virtual ~_Rope_RopeSubstring()
  { _M_base->_M_unref_nonnil(); }
};
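
// Illustrative note (not part of the original header): this node type keeps
// substring extraction lazy even when the base rope is itself lazy.  A
// sketch, assuming rope's substr member from the full header:
//
//   rope<char> __file_rope(__producer, __file_len, true);  // function rope
//   rope<char> __mid = __file_rope.substr(1000, 10);
//
// Rather than flattening __file_rope, the implementation can answer with a
// _Rope_RopeSubstring whose operator() forwards to the base producer with
// the start position shifted by _M_start (here 1000).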

/*
 * Self-destructing pointers to Rope_rep.
 * These are not conventional smart pointers.  Their
 * only purpose in life is to ensure that unref is called
 * on the pointer either at normal exit or if an exception
 * is raised.  It is the caller's responsibility to
 * adjust reference counts when these pointers are initialized
 * or assigned to.  (This convention significantly reduces
 * the number of potentially expensive reference count
 * updates.)
 */
template<class _CharT, class _Alloc>
struct _Rope_self_destruct_ptr {
  _Rope_RopeRep<_CharT,_Alloc>* _M_ptr;
  ~_Rope_self_destruct_ptr()
  { _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_ptr); }
#   ifdef _STLP_USE_EXCEPTIONS
  _Rope_self_destruct_ptr() : _M_ptr(0) {}
#   else
  _Rope_self_destruct_ptr() {}
#   endif
  _Rope_self_destruct_ptr(_Rope_RopeRep<_CharT,_Alloc>* __p) : _M_ptr(__p) {}
  _Rope_RopeRep<_CharT,_Alloc>& operator*() { return *_M_ptr; }
  _Rope_RopeRep<_CharT,_Alloc>* operator->() { return _M_ptr; }
  operator _Rope_RopeRep<_CharT,_Alloc>*() { return _M_ptr; }
  _Rope_self_destruct_ptr<_CharT, _Alloc>&
  operator= (_Rope_RopeRep<_CharT,_Alloc>* __x)
  { _M_ptr = __x; return *this; }
};
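
// Illustrative sketch (not part of the original header) of the intended
// pattern: hold an already-counted reference so it is released even if a
// later step throws.  __do_something_that_may_throw is hypothetical.
//
//   {
//     _Rope_self_destruct_ptr<char, allocator<char> > __tmp(
//         rope<char>::_S_substring(__base, 0, 10));  // result already counted
//     __do_something_that_may_throw(__tmp);
//   }  // ~_Rope_self_destruct_ptr unrefs __tmp._M_ptr on every exit path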

/*
 * Dereferencing a nonconst iterator has to return something
 * that behaves almost like a reference.  It's not possible to
 * return an actual reference since assignment requires extra
 * work.  And we would get into the same problems as with the
 * CD2 version of basic_string.
 */
template<class _CharT, class _Alloc>
class _Rope_char_ref_proxy {
  typedef _Rope_char_ref_proxy<_CharT, _Alloc> _Self;
  friend class rope<_CharT,_Alloc>;
  friend class _Rope_iterator<_CharT,_Alloc>;
  friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
  typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  typedef rope<_CharT,_Alloc> _My_rope;
  size_t _M_pos;
  _CharT _M_current;
  bool _M_current_valid;
  _My_rope* _M_root;     // The whole rope.
public:
  _Rope_char_ref_proxy(_My_rope* __r, size_t __p) :
    _M_pos(__p), _M_current_valid(false), _M_root(__r) {}
  _Rope_char_ref_proxy(const _Self& __x) :
    _M_pos(__x._M_pos), _M_current_valid(false), _M_root(__x._M_root) {}
  // Don't preserve cache if the reference can outlive the
  // expression.  We claim that's not possible without calling
  // a copy constructor or generating reference to a proxy
  // reference.  We declare the latter to have undefined semantics.
  _Rope_char_ref_proxy(_My_rope* __r, size_t __p, _CharT __c)
    : _M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) {}
  inline operator _CharT () const;
  _Self& operator= (_CharT __c);
  _Rope_char_ptr_proxy<_CharT, _Alloc> operator& () const;
  _Self& operator= (const _Self& __c) {
    return operator=((_CharT)__c);
  }
};
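
// Illustrative sketch (not part of the original header): this proxy is what
// a nonconst _Rope_iterator returns from operator* (see below), which is
// how assignment through an iterator works on a structure-shared rope:
//
//   rope<char> __r("abc");
//   rope<char>::iterator __it(__r, 1);
//   char __ch = *__it;   // operator _CharT(): plain read
//   *__it = 'X';         // operator=(_CharT): updates __r, copying only the
//                        // nodes needed, so ropes sharing the old tree keep "abc"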

#ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
template<class _CharT, class __Alloc>
inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
                 _Rope_char_ref_proxy <_CharT, __Alloc > __b) {
  _CharT __tmp = __a;
  __a = __b;
  __b = __tmp;
}
#else
// There is no really acceptable way to handle this.  The default
// definition of swap doesn't work for proxy references.
// It can't really be made to work, even with ugly hacks, since
// the only unusual operation it uses is the copy constructor, which
// is needed for other purposes.  We provide a macro for
// full specializations, and instantiate the most common case.
# define _ROPE_SWAP_SPECIALIZATION(_CharT, __Alloc) \
    inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a, \
                     _Rope_char_ref_proxy <_CharT, __Alloc > __b) { \
        _CharT __tmp = __a; \
        __a = __b; \
        __b = __tmp; \
    }

_ROPE_SWAP_SPECIALIZATION(char,_STLP_DEFAULT_ALLOCATOR(char) )

# ifndef _STLP_NO_WCHAR_T
_ROPE_SWAP_SPECIALIZATION(wchar_t,_STLP_DEFAULT_ALLOCATOR(wchar_t) )
# endif

#endif /* !_STLP_FUNCTION_TMPL_PARTIAL_ORDER */

template<class _CharT, class _Alloc>
class _Rope_char_ptr_proxy {
  // XXX this class should be rewritten.
public:
  typedef _Rope_char_ptr_proxy<_CharT, _Alloc> _Self;
  friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
  size_t _M_pos;
  rope<_CharT,_Alloc>* _M_root;     // The whole rope.

  _Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x)
    : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
  _Rope_char_ptr_proxy(const _Self& __x)
    : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
  _Rope_char_ptr_proxy() {}
  _Rope_char_ptr_proxy(_CharT* __x) : _M_pos(0), _M_root(0) {
    _STLP_ASSERT(0 == __x)
  }
  _Self& operator= (const _Self& __x) {
    _M_pos = __x._M_pos;
    _M_root = __x._M_root;
    return *this;
  }

  _Rope_char_ref_proxy<_CharT,_Alloc> operator*() const {
    return _Rope_char_ref_proxy<_CharT,_Alloc>(_M_root, _M_pos);
  }
};


/*
 * Rope iterators:
 * Unlike in the C version, we cache only part of the stack
 * for rope iterators, since they must be efficiently copyable.
 * When we run out of cache, we have to reconstruct the iterator
 * value.
 * Pointers from iterators are not included in reference counts.
 * Iterators are assumed to be thread private.  Ropes can
 * be shared.
 */
template<class _CharT, class _Alloc>
class _Rope_iterator_base
/*   : public random_access_iterator<_CharT, ptrdiff_t>  */
{
  friend class rope<_CharT,_Alloc>;
  typedef _Rope_iterator_base<_CharT, _Alloc> _Self;
  typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcat;
public:
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;

  enum { _S_path_cache_len = 4 }; // Must be <= 9 because of _M_path_directions.
  enum { _S_iterator_buf_len = 15 };
  size_t _M_current_pos;
  // The whole rope.
  _RopeRep* _M_root;
  // Starting position for current leaf
  size_t _M_leaf_pos;
  // Buffer possibly containing current char.
  _CharT* _M_buf_start;
  // Pointer to current char in buffer, != 0 ==> buffer valid.
  _CharT* _M_buf_ptr;
  // One past last valid char in buffer.
  _CharT* _M_buf_end;

  // What follows is the path cache.  We go out of our
  // way to make this compact.
  // Path_end contains the bottom section of the path from
  // the root to the current leaf.
  struct {
#  if defined (__BORLANDC__) && (__BORLANDC__ < 0x560)
    _RopeRep const*_M_data[4];
#  else
    _RopeRep const*_M_data[_S_path_cache_len];
#  endif
  } _M_path_end;
  // Last valid position in path_end;
  // _M_path_end[0] ... _M_path_end[_M_leaf_index-1]
  // point to concatenation nodes.
  int _M_leaf_index;
  // (_M_path_directions >> __i) & 1 is 1
  // if we got from _M_path_end[leaf_index - __i - 1]
  // to _M_path_end[leaf_index - __i] by going to the
  // right. Assumes path_cache_len <= 9.
  unsigned char _M_path_directions;
  // Short buffer for surrounding chars.
  // This is useful primarily for
  // RopeFunctions.  We put the buffer
  // here to avoid locking in the
  // multithreaded case.
  // The cached path is generally assumed to be valid
  // only if the buffer is valid.
  struct {
#  if defined (__BORLANDC__) && (__BORLANDC__ < 0x560)
    _CharT _M_data[15];
#  else
    _CharT _M_data[_S_iterator_buf_len];
#  endif
  } _M_tmp_buf;

  // Set buffer contents given path cache.
  static void _S_setbuf(_Rope_iterator_base<_CharT, _Alloc>& __x);
  // Set buffer contents and path cache.
  static void _S_setcache(_Rope_iterator_base<_CharT, _Alloc>& __x);
  // As above, but assumes path cache is valid for previous posn.
  static void _S_setcache_for_incr(_Rope_iterator_base<_CharT, _Alloc>& __x);
  _Rope_iterator_base() {}
  _Rope_iterator_base(_RopeRep* __root, size_t __pos)
    : _M_current_pos(__pos), _M_root(__root), _M_buf_ptr(0) {}
  void _M_incr(size_t __n);
  void _M_decr(size_t __n);
public:
  size_t index() const { return _M_current_pos; }
private:
  void _M_copy_buf(const _Self& __x) {
    _M_tmp_buf = __x._M_tmp_buf;
    if (__x._M_buf_start == __x._M_tmp_buf._M_data) {
      _M_buf_start = _M_tmp_buf._M_data;
      _M_buf_end = _M_buf_start + (__x._M_buf_end - __x._M_buf_start);
      _M_buf_ptr = _M_buf_start + (__x._M_buf_ptr - __x._M_buf_start);
    } else {
      _M_buf_end = __x._M_buf_end;
    }
  }

public:
  _Rope_iterator_base(const _Self& __x) :
    _M_current_pos(__x._M_current_pos),
    _M_root(__x._M_root),
    _M_leaf_pos(__x._M_leaf_pos),
    _M_buf_start(__x._M_buf_start),
    _M_buf_ptr(__x._M_buf_ptr),
    _M_path_end(__x._M_path_end),
    _M_leaf_index(__x._M_leaf_index),
    _M_path_directions(__x._M_path_directions) {
    if (0 != __x._M_buf_ptr) {
      _M_copy_buf(__x);
    }
  }
  _Self& operator = (const _Self& __x) {
    _M_current_pos = __x._M_current_pos;
    _M_root = __x._M_root;
    _M_buf_start = __x._M_buf_start;
    _M_buf_ptr = __x._M_buf_ptr;
    _M_path_end = __x._M_path_end;
    _M_leaf_index = __x._M_leaf_index;
    _M_path_directions = __x._M_path_directions;
    _M_leaf_pos = __x._M_leaf_pos;
    if (0 != __x._M_buf_ptr) {
      _M_copy_buf(__x);
    }
    return *this;
  }
};

template<class _CharT, class _Alloc> class _Rope_iterator;

template<class _CharT, class _Alloc>
class _Rope_const_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
  friend class rope<_CharT,_Alloc>;
  typedef  _Rope_const_iterator<_CharT, _Alloc> _Self;
  typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
  //  protected:
public:
#   ifndef _STLP_HAS_NO_NAMESPACES
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  // The one from the base class may not be directly visible.
#   endif
  _Rope_const_iterator(const _RopeRep* __root, size_t __pos):
    _Rope_iterator_base<_CharT,_Alloc>(__CONST_CAST(_RopeRep*,__root), __pos)
    // Only nonconst iterators modify root ref count
  {}
public:
  typedef _CharT reference;   // Really a value.  Returning a reference
                              // would be a mess, since it would have
                              // to be included in refcount.
  typedef const _CharT* pointer;
  typedef _CharT value_type;
  typedef ptrdiff_t difference_type;
  typedef random_access_iterator_tag iterator_category;

public:
  _Rope_const_iterator() {}
  _Rope_const_iterator(const _Self& __x) :
    _Rope_iterator_base<_CharT,_Alloc>(__x) { }
  _Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x):
    _Rope_iterator_base<_CharT,_Alloc>(__x) {}
  _Rope_const_iterator(const rope<_CharT,_Alloc>& __r, size_t __pos) :
    _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr._M_data, __pos) {}
  _Self& operator= (const _Self& __x) {
    _Base::operator=(__x);
    return *this;
  }
  reference operator*() {
    if (0 == this->_M_buf_ptr)
#if !defined (__DMC__)
      _S_setcache(*this);
#else
    { _Rope_iterator_base<_CharT, _Alloc>* __x = this; _S_setcache(*__x); }
#endif
    return *(this->_M_buf_ptr);
  }
  _Self& operator++() {
    _CharT* __next;
    if (0 != this->_M_buf_ptr && (__next = this->_M_buf_ptr + 1) < this->_M_buf_end) {
      this->_M_buf_ptr = __next;
      ++this->_M_current_pos;
    } else {
      this->_M_incr(1);
    }
    return *this;
  }
  _Self& operator+=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_incr(__n);
    } else {
      this->_M_decr(-__n);
    }
    return *this;
  }
  _Self& operator--() {
    this->_M_decr(1);
    return *this;
  }
  _Self& operator-=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_decr(__n);
    } else {
      this->_M_incr(-__n);
    }
    return *this;
  }
  _Self operator++(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_incr(1);
    return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
    // This makes a subsequent dereference expensive.
    // Perhaps we should instead copy the iterator
    // if it has a valid cache?
  }
  _Self operator--(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_decr(1);
    return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
  }
  inline reference operator[](size_t __n);
};

template<class _CharT, class _Alloc>
class _Rope_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
  friend class rope<_CharT,_Alloc>;
  typedef _Rope_iterator<_CharT, _Alloc> _Self;
  typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;

public:
  rope<_CharT,_Alloc>* _M_root_rope;
  // root is treated as a cached version of this,
  // and is used to detect changes to the underlying
  // rope.
  // Root is included in the reference count.
  // This is necessary so that we can detect changes reliably.
  // Unfortunately, it requires careful bookkeeping for the
  // nonGC case.
  _Rope_iterator(rope<_CharT,_Alloc>* __r, size_t __pos);

  void _M_check();
public:
  typedef _Rope_char_ref_proxy<_CharT,_Alloc>  reference;
  typedef _Rope_char_ref_proxy<_CharT,_Alloc>* pointer;
  typedef _CharT value_type;
  typedef ptrdiff_t difference_type;
  typedef random_access_iterator_tag iterator_category;
public:
  ~_Rope_iterator() {  //*TY 5/6/00 - added dtor to balance reference count
    _RopeRep::_S_unref(this->_M_root);
  }

  rope<_CharT,_Alloc>& container() { return *_M_root_rope; }
  _Rope_iterator() {
    this->_M_root = 0;  // Needed for reference counting.
  }
  _Rope_iterator(const  _Self& __x) :
    _Rope_iterator_base<_CharT,_Alloc>(__x) {
    _M_root_rope = __x._M_root_rope;
    _RopeRep::_S_ref(this->_M_root);
  }
  _Rope_iterator(rope<_CharT,_Alloc>& __r, size_t __pos);
  _Self& operator= (const  _Self& __x) {
    _RopeRep* __old = this->_M_root;
    _RopeRep::_S_ref(__x._M_root);
    _Base::operator=(__x);
    _M_root_rope = __x._M_root_rope;
    _RopeRep::_S_unref(__old);
    return *this;
  }
  reference operator*() {
    _M_check();
    if (0 == this->_M_buf_ptr) {
      return reference(_M_root_rope, this->_M_current_pos);
    } else {
      return reference(_M_root_rope, this->_M_current_pos, *(this->_M_buf_ptr));
    }
  }
  _Self& operator++() {
    this->_M_incr(1);
    return *this;
  }
  _Self& operator+=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_incr(__n);
    } else {
      this->_M_decr(-__n);
    }
    return *this;
  }
  _Self& operator--() {
    this->_M_decr(1);
    return *this;
  }
  _Self& operator-=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_decr(__n);
    } else {
      this->_M_incr(-__n);
    }
    return *this;
  }
  _Self operator++(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_incr(1);
    return _Self(_M_root_rope, __old_pos);
  }
  _Self operator--(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_decr(1);
    return _Self(_M_root_rope, __old_pos);
  }
  reference operator[](ptrdiff_t __n) {
    return reference(_M_root_rope, this->_M_current_pos + __n);
  }
};
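
// Illustrative sketch (not part of the original header): both iterator
// flavors are random access, so standard algorithms apply to ropes.
// Assuming rope's begin()/end() from the full header:
//
//   rope<char> __r("hello");
//   size_t __count = 0;
//   for (rope<char>::const_iterator __it = __r.begin();
//        __it != __r.end(); ++__it) {
//     if (*__it == 'l') ++__count;   // reads through the cached leaf buffer
//   }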

# ifdef _STLP_USE_OLD_HP_ITERATOR_QUERIES
template <class _CharT, class _Alloc>
inline random_access_iterator_tag
iterator_category(const _Rope_iterator<_CharT,_Alloc>&) {  return random_access_iterator_tag();}
template <class _CharT, class _Alloc>
inline _CharT* value_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
template <class _CharT, class _Alloc>
inline ptrdiff_t* distance_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
template <class _CharT, class _Alloc>
inline random_access_iterator_tag
iterator_category(const _Rope_const_iterator<_CharT,_Alloc>&) { return random_access_iterator_tag(); }
template <class _CharT, class _Alloc>
inline _CharT* value_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
template <class _CharT, class _Alloc>
inline ptrdiff_t* distance_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
#endif /* _STLP_USE_OLD_HP_ITERATOR_QUERIES */

template <class _CharT, class _Alloc, class _CharConsumer>
bool _S_apply_to_pieces(_CharConsumer& __c,
                        _Rope_RopeRep<_CharT, _Alloc> *__r,
                        size_t __begin, size_t __end);
                        // begin and end are assumed to be in range.

template <class _CharT, class _Alloc>
class rope
#if defined (_STLP_USE_PARTIAL_SPEC_WORKAROUND)
           : public __stlport_class<rope<_CharT, _Alloc> >
#endif
{
  typedef rope<_CharT,_Alloc> _Self;
public:
  typedef _CharT value_type;
  typedef ptrdiff_t difference_type;
  typedef size_t size_type;
  typedef _CharT const_reference;
  typedef const _CharT* const_pointer;
  typedef _Rope_iterator<_CharT,_Alloc> iterator;
  typedef _Rope_const_iterator<_CharT,_Alloc> const_iterator;
  typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
  typedef _Rope_char_ptr_proxy<_CharT,_Alloc> pointer;

  friend class _Rope_iterator<_CharT,_Alloc>;
  friend class _Rope_const_iterator<_CharT,_Alloc>;
  friend struct _Rope_RopeRep<_CharT,_Alloc>;
  friend class _Rope_iterator_base<_CharT,_Alloc>;
  friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
  friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
  friend struct _Rope_RopeSubstring<_CharT,_Alloc>;

  _STLP_DECLARE_RANDOM_ACCESS_REVERSE_ITERATORS;

protected:
  typedef _CharT* _Cstrptr;

  static _CharT _S_empty_c_str[1];

  enum { _S_copy_max = 23 };
  // For strings shorter than _S_copy_max, we copy to
  // concatenate.

  typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep;
  typedef typename _RopeRep::_IsBasicCharType _IsBasicCharType;

public:
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _Alloc_traits<_CharT,_Alloc>::allocator_type  allocator_type;

public:
  // The only data member of a rope:
  _STLP_PRIV _STLP_alloc_proxy<_RopeRep*, _CharT, allocator_type> _M_tree_ptr;

public:
  allocator_type get_allocator() const { return allocator_type(_M_tree_ptr); }

public:
  typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcatenation;
  typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
  typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
  typedef _Rope_RopeSubstring<_CharT,_Alloc> _RopeSubstring;

  // Retrieve a character at the indicated position.
  static _CharT _S_fetch(_RopeRep* __r, size_type __pos);

  // Obtain a pointer to the character at the indicated position.
  // The pointer can be used to change the character.
  // If such a pointer cannot be produced, as is frequently the
  // case, 0 is returned instead.
  // (Returns nonzero only if all nodes in the path have a refcount
  // of 1.)
  static _CharT* _S_fetch_ptr(_RopeRep* __r, size_type __pos);

  static void _S_unref(_RopeRep* __t) {
    _RopeRep::_S_unref(__t);
  }
  static void _S_ref(_RopeRep* __t) {
    _RopeRep::_S_ref(__t);
  }

  typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;

  // Result is counted in refcount.
  static _RopeRep* _S_substring(_RopeRep* __base,
                                size_t __start, size_t __endp1);

  static _RopeRep* _S_concat_char_iter(_RopeRep* __r,
                                       const _CharT* __iter, size_t __slen);
  // Concatenate rope and char ptr, copying __s.
  // Should really take an arbitrary iterator.
  // Result is counted in refcount.
  static _RopeRep* _S_destr_concat_char_iter(_RopeRep* __r,
                                             const _CharT* __iter, size_t __slen);
    // As above, but one reference to __r is about to be
    // destroyed.  Thus the pieces may be recycled if all
    // relevant reference counts are 1.

  // General concatenation on _RopeRep.  Result
  // has refcount of 1.  Adjusts argument refcounts.
  static _RopeRep* _S_concat_rep(_RopeRep* __left, _RopeRep* __right);

public:
#if defined (_STLP_MEMBER_TEMPLATES)
  template <class _CharConsumer>
#else
  typedef _Rope_char_consumer<_CharT> _CharConsumer;
#endif
  void apply_to_pieces(size_t __begin, size_t __end,
                       _CharConsumer& __c) const
  { _S_apply_to_pieces(__c, _M_tree_ptr._M_data, __begin, __end); }

protected:

  static size_t _S_rounded_up_size(size_t __n)
  { return _RopeRep::_S_rounded_up_size(__n); }

  // Allocate and construct a RopeLeaf using the supplied allocator
  // Takes ownership of s instead of copying.
  static _RopeLeaf* _S_new_RopeLeaf(_CharT *__s,
                                    size_t _p_size, allocator_type __a) {
    _RopeLeaf* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
                                                _RopeLeaf).allocate(1);
    _STLP_TRY {
      _STLP_PLACEMENT_NEW(__space) _RopeLeaf(__s, _p_size, __a);
    }
   _STLP_UNWIND(_STLP_CREATE_ALLOCATOR(allocator_type,__a,
                                       _RopeLeaf).deallocate(__space, 1))
    return __space;
  }

  static _RopeConcatenation* _S_new_RopeConcatenation(_RopeRep* __left, _RopeRep* __right,
                                                      allocator_type __a) {
   _RopeConcatenation* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
                                                        _RopeConcatenation).allocate(1);
    return _STLP_PLACEMENT_NEW(__space) _RopeConcatenation(__left, __right, __a);
  }

  static _RopeFunction* _S_new_RopeFunction(char_producer<_CharT>* __f,
                                            size_t _p_size, bool __d, allocator_type __a) {
   _RopeFunction* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
                                                   _RopeFunction).allocate(1);
    return _STLP_PLACEMENT_NEW(__space) _RopeFunction(__f, _p_size, __d, __a);
  }

  static _RopeSubstring* _S_new_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
                                              size_t __l, allocator_type __a) {
   _RopeSubstring* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
                                                    _RopeSubstring).allocate(1);
    return _STLP_PLACEMENT_NEW(__space) _RopeSubstring(__b, __s, __l, __a);
  }

  static
  _RopeLeaf* _S_RopeLeaf_from_unowned_char_ptr(const _CharT *__s,
                                               size_t _p_size, allocator_type __a) {
    if (0 == _p_size) return 0;

   _CharT* __buf = _STLP_CREATE_ALLOCATOR(allocator_type,__a, _CharT).allocate(_S_rounded_up_size(_p_size));

    _STLP_PRIV __ucopy_n(__s, _p_size, __buf);
    _S_construct_null(__buf + _p_size);

    _STLP_TRY {
      return _S_new_RopeLeaf(__buf, _p_size, __a);
    }
    _STLP_UNWIND(_RopeRep::_S_free_string(__buf, _p_size, __a))
    _STLP_RET_AFTER_THROW(0)
  }


  // Concatenation of nonempty strings.
  // Always builds a concatenation node.
  // Rebalances if the result is too deep.
  // Result has refcount 1.
  // Does not increment left and right ref counts even though
  // they are referenced.
  static _RopeRep*
  _S_tree_concat(_RopeRep* __left, _RopeRep* __right);

  // Concatenation helper functions
  static _RopeLeaf*
  _S_leaf_concat_char_iter(_RopeLeaf* __r,
                           const _CharT* __iter, size_t __slen);
  // Concatenate by copying leaf.
  // should take an arbitrary iterator
  // result has refcount 1.
  static _RopeLeaf* _S_destr_leaf_concat_char_iter
  (_RopeLeaf* __r, const _CharT* __iter, size_t __slen);
  // A version that potentially clobbers __r if __r->_M_ref_count == 1.


  // A helper function for exponentiating strings.
  // This uses a nonstandard refcount convention.
  // The result has refcount 0.
  typedef _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;
#if !defined (__GNUC__) || (__GNUC__ < 3)
  friend _Concat_fn;
#else
  friend struct _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc>;
#endif

public:
  static size_t _S_char_ptr_len(const _CharT* __s) {
    return char_traits<_CharT>::length(__s);
  }

public: /* for operators */
  rope(_RopeRep* __t, const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, __t) { }
private:
  // Copy __r to the _CharT buffer.
  // Returns __buffer + __r->_M_size._M_data.
  // Assumes that buffer is uninitialized.
  static _CharT* _S_flatten(_RopeRep* __r, _CharT* __buffer);

  // Again, with explicit starting position and length.
  // Assumes that buffer is uninitialized.
  static _CharT* _S_flatten(_RopeRep* __r,
                            size_t __start, size_t __len,
                            _CharT* __buffer);

  // fbp : HP aCC prohibits access to protected min_len from within static methods ( ?? )
public:
  static const unsigned long _S_min_len[__ROPE_DEPTH_SIZE];
protected:
  static bool _S_is_balanced(_RopeRep* __r)
  { return (__r->_M_size._M_data >= _S_min_len[__r->_M_depth]); }

  static bool _S_is_almost_balanced(_RopeRep* __r) {
    return (__r->_M_depth == 0 ||
            __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 1]);
  }

  static bool _S_is_roughly_balanced(_RopeRep* __r) {
    return (__r->_M_depth <= 1 ||
            __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 2]);
  }
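
  // Illustrative note (not part of the original header): in the classic
  // SGI rope design that STLport follows, _S_min_len[__d] is the smallest
  // length a depth-__d tree may have and still count as balanced; the
  // entries grow roughly like Fibonacci numbers, so the depth of a
  // balanced rope grows only logarithmically with its length.  For
  // instance, if _S_min_len[3] were 5, a depth-3 rope of length 4 would
  // fail _S_is_balanced() and be rebalanced once a concatenation finds
  // the tree too deep.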

  // Assumes the result is not empty.
  static _RopeRep* _S_concat_and_set_balanced(_RopeRep* __left,
                                              _RopeRep* __right) {
    _RopeRep* __result = _S_concat_rep(__left, __right);
    if (_S_is_balanced(__result)) __result->_M_is_balanced = true;
    return __result;
  }

  // The basic rebalancing operation.  Logically copies the
  // rope.  The result has refcount of 1.  The client will
  // usually decrement the reference count of __r.
  // The result is within height 2 of balanced by the above
  // definition.
  static _RopeRep* _S_balance(_RopeRep* __r);

  // Add all unbalanced subtrees to the forest of balanced trees.
  // Used only by balance.
  static void _S_add_to_forest(_RopeRep*__r, _RopeRep** __forest);

  // Add __r to forest, assuming __r is already balanced.
  static void _S_add_leaf_to_forest(_RopeRep* __r, _RopeRep** __forest);

#ifdef _STLP_DEBUG
  // Print to stdout, exposing structure
  static void _S_dump(_RopeRep* __r, int __indent = 0);
#endif

  // Return -1, 0, or 1 if __x < __y, __x == __y, or __x > __y resp.
  static int _S_compare(const _RopeRep* __x, const _RopeRep* __y);

  void _STLP_FUNCTION_THROWS _M_throw_out_of_range() const;

  void _M_reset(_RopeRep* __r) {
    //if (__r != _M_tree_ptr._M_data) {
      _S_unref(_M_tree_ptr._M_data);
      _M_tree_ptr._M_data = __r;
    //}
  }

public:
  bool empty() const { return 0 == _M_tree_ptr._M_data; }

  // Comparison member function.  This is public only for those
  // clients that need a ternary comparison.  Others
  // should use the comparison operators below.
  int compare(const _Self& __y) const {
    return _S_compare(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data);
  }

  rope(const _CharT* __s, const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_RopeLeaf_from_unowned_char_ptr(__s, _S_char_ptr_len(__s),__a))
  {}

  rope(const _CharT* __s, size_t __len,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, (_S_RopeLeaf_from_unowned_char_ptr(__s, __len, __a)))
  {}

  // Should perhaps be templatized with respect to the iterator type
  // and use Sequence_buffer.  (It should perhaps use sequence_buffer
  // even now.)
  rope(const _CharT *__s, const _CharT *__e,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_RopeLeaf_from_unowned_char_ptr(__s, __e - __s, __a))
  {}

  rope(const const_iterator& __s, const const_iterator& __e,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
                                    __e._M_current_pos))
  {}

  rope(const iterator& __s, const iterator& __e,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
                                    __e._M_current_pos))
  {}

  rope(_CharT __c, const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, (_RopeRep*)0) {
    _CharT* __buf = _M_tree_ptr.allocate(_S_rounded_up_size(1));

    _Copy_Construct(__buf, __c);
    _S_construct_null(__buf + 1);

    _STLP_TRY {
      _M_tree_ptr._M_data = _S_new_RopeLeaf(__buf, 1, __a);
    }
    _STLP_UNWIND(_RopeRep::_S_free_string(__buf, 1, __a))
  }

  rope(size_t __n, _CharT __c,
       const allocator_type& __a = allocator_type()):
    _M_tree_ptr(__a, (_RopeRep*)0) {
    if (0 == __n)
      return;

    rope<_CharT,_Alloc> __result;
# define  __exponentiate_threshold size_t(32)
    _RopeRep* __remainder;
    rope<_CharT,_Alloc> __remainder_rope;

    // gcc-2.7.2 bugs
    typedef _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;

    size_t __exponent = __n / __exponentiate_threshold;
    size_t __rest = __n % __exponentiate_threshold;
    if (0 == __rest) {
      __remainder = 0;
    } else {
      _CharT* __rest_buffer = _M_tree_ptr.allocate(_S_rounded_up_size(__rest));
      uninitialized_fill_n(__rest_buffer, __rest, __c);
      _S_construct_null(__rest_buffer + __rest);
      _STLP_TRY {
        __remainder = _S_new_RopeLeaf(__rest_buffer, __rest, __a);
      }
      _STLP_UNWIND(_RopeRep::_S_free_string(__rest_buffer, __rest, __a))
    }
    __remainder_rope._M_tree_ptr._M_data = __remainder;
    if (__exponent != 0) {
      _CharT* __base_buffer = _M_tree_ptr.allocate(_S_rounded_up_size(__exponentiate_threshold));
      _RopeLeaf* __base_leaf;
      rope<_CharT,_Alloc> __base_rope;
      uninitialized_fill_n(__base_buffer, __exponentiate_threshold, __c);
      _S_construct_null(__base_buffer + __exponentiate_threshold);
      _STLP_TRY {
        __base_leaf = _S_new_RopeLeaf(__base_buffer,
                                      __exponentiate_threshold, __a);
      }
      _STLP_UNWIND(_RopeRep::_S_free_string(__base_buffer,
                                            __exponentiate_threshold, __a))
      __base_rope._M_tree_ptr._M_data = __base_leaf;
      if (1 == __exponent) {
        __result = __base_rope;
        // One each for ba

[Large file truncated by the source viewer; the remaining lines of _rope.h (2374 lines total) are not shown.]