stl_alloc.h

00001 // Allocators -*- C++ -*-
00002 
00003 // Copyright (C) 2001, 2002 Free Software Foundation, Inc.
00004 //
00005 // This file is part of the GNU ISO C++ Library.  This library is free
00006 // software; you can redistribute it and/or modify it under the
00007 // terms of the GNU General Public License as published by the
00008 // Free Software Foundation; either version 2, or (at your option)
00009 // any later version.
00010 
00011 // This library is distributed in the hope that it will be useful,
00012 // but WITHOUT ANY WARRANTY; without even the implied warranty of
00013 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
00014 // GNU General Public License for more details.
00015 
00016 // You should have received a copy of the GNU General Public License along
00017 // with this library; see the file COPYING.  If not, write to the Free
00018 // Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307,
00019 // USA.
00020 
00021 // As a special exception, you may use this file as part of a free software
00022 // library without restriction.  Specifically, if other files instantiate
00023 // templates or use macros or inline functions from this file, or you compile
00024 // this file and link it with other files to produce an executable, this
00025 // file does not by itself cause the resulting executable to be covered by
00026 // the GNU General Public License.  This exception does not however
00027 // invalidate any other reasons why the executable file might be covered by
00028 // the GNU General Public License.
00029 
00030 /*
00031  * Copyright (c) 1996-1997
00032  * Silicon Graphics Computer Systems, Inc.
00033  *
00034  * Permission to use, copy, modify, distribute and sell this software
00035  * and its documentation for any purpose is hereby granted without fee,
00036  * provided that the above copyright notice appear in all copies and
00037  * that both that copyright notice and this permission notice appear
00038  * in supporting documentation.  Silicon Graphics makes no
00039  * representations about the suitability of this software for any
00040  * purpose.  It is provided "as is" without express or implied warranty.
00041  */
00042 
00048 #ifndef __GLIBCPP_INTERNAL_ALLOC_H
00049 #define __GLIBCPP_INTERNAL_ALLOC_H
00050 
00085 #include <cstddef>
00086 #include <cstdlib>
00087 #include <cstring>
00088 #include <cassert>
00089 #include <bits/functexcept.h>   // For __throw_bad_alloc
00090 #include <bits/stl_threads.h>
00091 
00092 #include <bits/atomicity.h>
00093 
00094 namespace std
00095 {
00104   class __new_alloc
00105   {
00106   public:
00107     static void*
00108     allocate(size_t __n)
00109     { return ::operator new(__n); }
00110 
00111     static void
00112     deallocate(void* __p, size_t)
00113     { ::operator delete(__p); }
00114   };
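
  // A minimal usage sketch of __new_alloc (illustrative, not part of the
  // original header; it assumes this header is reached through <memory> in
  // this version of libstdc++):
  //
  //   #include <memory>
  //
  //   int main()
  //   {
  //     // Raw, uninitialized storage from operator new; the size passed to
  //     // deallocate is ignored by this allocator.
  //     void* __p = std::__new_alloc::allocate(64);
  //     std::__new_alloc::deallocate(__p, 64);
  //   }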
00115 
00116 
00127   template<int __inst>
00128     class __malloc_alloc_template
00129     {
00130     private:
00131       static void* _S_oom_malloc(size_t);
00132 #ifdef _GLIBCPP_DEPRECATED
00133       static void* _S_oom_realloc(void*, size_t);
00134 #endif
00135       static void (* __malloc_alloc_oom_handler)();
00136 
00137     public:
00138       static void*
00139       allocate(size_t __n)
00140       {
00141         void* __result = malloc(__n);
00142         if (__builtin_expect(__result == 0, 0))
00144           __result = _S_oom_malloc(__n);
00144         return __result;
00145       }
00146 
00147       static void
00148       deallocate(void* __p, size_t /* __n */)
00149       { free(__p); }
00150 
00151 #ifdef _GLIBCPP_DEPRECATED
00152       static void*
00153       reallocate(void* __p, size_t /* old_sz */, size_t __new_sz)
00154       {
00155         void* __result = realloc(__p, __new_sz);
00156         if (__builtin_expect(__result == 0, 0))
00157           __result = _S_oom_realloc(__p, __new_sz);
00158         return __result;
00159       }
00160 #endif
00161 
00162       static void (* __set_malloc_handler(void (*__f)()))()
00163       {
00164         void (* __old)() = __malloc_alloc_oom_handler;
00165         __malloc_alloc_oom_handler = __f;
00166         return __old;
00167       }
00168     };
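
  // A sketch of the malloc-based allocator's new_handler-style protocol
  // (illustrative; my_oom_handler is a made-up name, and a real handler
  // should free memory or throw, otherwise _S_oom_malloc retries forever):
  //
  //   #include <cstdio>
  //   #include <memory>
  //
  //   void my_oom_handler()
  //   { std::fputs("malloc failed, trying again\n", stderr); }
  //
  //   int main()
  //   {
  //     typedef std::__malloc_alloc_template<0> _Malloc;
  //     _Malloc::__set_malloc_handler(&my_oom_handler);
  //     void* __p = _Malloc::allocate(128);   // the handler runs only if
  //                                           // malloc returns 0
  //     _Malloc::deallocate(__p, 128);
  //   }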
00169 
00170   // malloc_alloc out-of-memory handling
00171   template<int __inst>
00172     void (* __malloc_alloc_template<__inst>::__malloc_alloc_oom_handler)() = 0;
00173 
00174   template<int __inst>
00175     void*
00176     __malloc_alloc_template<__inst>::
00177     _S_oom_malloc(size_t __n)
00178     {
00179       void (* __my_malloc_handler)();
00180       void* __result;
00181 
00182       for (;;)
00183         {
00184           __my_malloc_handler = __malloc_alloc_oom_handler;
00185           if (__builtin_expect(__my_malloc_handler == 0, 0))
00186             __throw_bad_alloc();
00187           (*__my_malloc_handler)();
00188           __result = malloc(__n);
00189           if (__result)
00190             return __result;
00191         }
00192     }
00193 
00194 #ifdef _GLIBCPP_DEPRECATED
00195   template<int __inst>
00196     void*
00197     __malloc_alloc_template<__inst>::
00198     _S_oom_realloc(void* __p, size_t __n)
00199     {
00200       void (* __my_malloc_handler)();
00201       void* __result;
00202 
00203       for (;;)
00204         {
00205           __my_malloc_handler = __malloc_alloc_oom_handler;
00206           if (__builtin_expect(__my_malloc_handler == 0, 0))
00207             __throw_bad_alloc();
00208           (*__my_malloc_handler)();
00209           __result = realloc(__p, __n);
00210           if (__result)
00211             return __result;
00212         }
00213     }
00214 #endif
00215 
00216   // Should not be referenced within the library anymore.
00217   typedef __new_alloc                 __mem_interface;
00218 
00230   template<typename _Tp, typename _Alloc>
00231     class __simple_alloc
00232     {
00233     public:
00234       static _Tp*
00235       allocate(size_t __n)
00236       {
00237         _Tp* __ret = 0;
00238         if (__n)
00239           __ret = static_cast<_Tp*>(_Alloc::allocate(__n * sizeof(_Tp)));
00240         return __ret;
00241       }
00242   
00243       static _Tp*
00244       allocate()
00245       { return (_Tp*) _Alloc::allocate(sizeof (_Tp)); }
00246   
00247       static void
00248       deallocate(_Tp* __p, size_t __n)
00249       { if (0 != __n) _Alloc::deallocate(__p, __n * sizeof (_Tp)); }
00250   
00251       static void
00252       deallocate(_Tp* __p)
00253       { _Alloc::deallocate(__p, sizeof (_Tp)); }
00254     };
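
  // A minimal sketch of the typed wrapper (illustrative; _Int_alloc is just
  // a local typedef):
  //
  //   #include <memory>
  //
  //   int main()
  //   {
  //     typedef std::__simple_alloc<int, std::__new_alloc> _Int_alloc;
  //     int* __arr = _Int_alloc::allocate(10);   // 10 * sizeof(int) bytes,
  //                                              // uninitialized
  //     for (int __i = 0; __i < 10; ++__i)
  //       __arr[__i] = __i;
  //     _Int_alloc::deallocate(__arr, 10);
  //   }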
00255 
00256 
00271   template<typename _Alloc>
00272     class __debug_alloc
00273     {
00274     private:
00275       // Size of space used to store size.  Note that this must be
00276       // large enough to preserve alignment.
00277       enum {_S_extra = 8};
00278 
00279     public:
00280       static void*
00281       allocate(size_t __n)
00282       {
00283         char* __result = (char*)_Alloc::allocate(__n + (int) _S_extra);
00284         *(size_t*)__result = __n;
00285         return __result + (int) _S_extra;
00286       }
00287 
00288       static void
00289       deallocate(void* __p, size_t __n)
00290       {
00291         char* __real_p = (char*)__p - (int) _S_extra;
00292         assert(*(size_t*)__real_p == __n);
00293         _Alloc::deallocate(__real_p, __n + (int) _S_extra);
00294       }
00295 
00296 #ifdef _GLIBCPP_DEPRECATED
00297       static void*
00298       reallocate(void* __p, size_t __old_sz, size_t __new_sz)
00299       {
00300         char* __real_p = (char*)__p - (int) _S_extra;
00301         assert(*(size_t*)__real_p == __old_sz);
00302         char* __result = (char*) _Alloc::reallocate(__real_p, 
00303                             __old_sz + (int) _S_extra,
00304                             __new_sz + (int) _S_extra);
00305         *(size_t*)__result = __new_sz;
00306         return __result + (int) _S_extra;
00307       }
00308 #endif
00309     };
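
  // A sketch of the size-checking wrapper (illustrative; _Checked is a
  // local typedef):
  //
  //   #include <memory>
  //
  //   int main()
  //   {
  //     typedef std::__debug_alloc<std::__new_alloc> _Checked;
  //     void* __p = _Checked::allocate(32);   // 8 extra bytes record "32"
  //     _Checked::deallocate(__p, 32);        // sizes match, assert passes
  //     // _Checked::deallocate(__p, 16);     // size mismatch would assert
  //   }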
00310 
00311 
00342   template<bool __threads, int __inst>
00343     class __default_alloc_template
00344     {
00345     private:
00346       enum {_ALIGN = 8};
00347       enum {_MAX_BYTES = 128};
00348       enum {_NFREELISTS = _MAX_BYTES / _ALIGN};
00349 
00350       union _Obj
00351       {
00352         union _Obj* _M_free_list_link;
00353         char        _M_client_data[1];    // The client sees this.
00354       };
00355 
00356       static _Obj* volatile         _S_free_list[_NFREELISTS];
00357 
00358       // Chunk allocation state.
00359       static char*                  _S_start_free;
00360       static char*                  _S_end_free;
00361       static size_t                 _S_heap_size;
00362 
00363       static _STL_mutex_lock        _S_node_allocator_lock;
00364 
00365       static size_t
00366       _S_round_up(size_t __bytes)
00367       { return (((__bytes) + (size_t) _ALIGN-1) & ~((size_t) _ALIGN - 1)); }
00368 
00369       static size_t
00370       _S_freelist_index(size_t __bytes)
00371       { return (((__bytes) + (size_t)_ALIGN - 1)/(size_t)_ALIGN - 1); }
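
      // Worked example (illustrative): for a request of 13 bytes,
      // _S_round_up(13) == (13 + 7) & ~7 == 16, and
      // _S_freelist_index(13) == (13 + 7) / 8 - 1 == 1, i.e. the second
      // free list, which serves every request of 9 to 16 bytes.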
00372 
00373       // Returns an object of size __n, and optionally adds entries to
00374       // the size-__n free list.
00375       static void*
00376       _S_refill(size_t __n);
00377 
00378       // Allocates a chunk for __nobjs objects of size __size.  __nobjs may
00379       // be reduced if it is inconvenient to allocate the requested number.
00380       static char*
00381       _S_chunk_alloc(size_t __size, int& __nobjs);
00382 
00383       // It would be nice to use _STL_auto_lock here.  But we need a
00384       // test whether threads are in use.
00385       struct _Lock
00386       {
00387         _Lock() { if (__threads) _S_node_allocator_lock._M_acquire_lock(); }
00388         ~_Lock() { if (__threads) _S_node_allocator_lock._M_release_lock(); }
00389       } __attribute__ ((__unused__));
00390       friend struct _Lock;
00391 
00392       static _Atomic_word _S_force_new;
00393 
00394     public:
00395       // __n must be > 0
00396       static void*
00397       allocate(size_t __n)
00398       {
00399         void* __ret = 0;
00400
00401         // If there is a race through here, assume answer from getenv
00402         // will resolve in same direction.  Inspired by techniques
00403         // to efficiently support threading found in basic_string.h.
00404         if (_S_force_new == 0)
00405           {
00406             if (getenv("GLIBCPP_FORCE_NEW"))
00407               __atomic_add(&_S_force_new, 1);
00408             else
00409               __atomic_add(&_S_force_new, -1);
00410             // Trust but verify...
00411             assert(_S_force_new != 0);
00412           }
00413
00414         if ((__n > (size_t) _MAX_BYTES) || (_S_force_new > 0))
00415           __ret = __new_alloc::allocate(__n);
00416         else
00417           {
00418             _Obj* volatile* __my_free_list = _S_free_list
00419               + _S_freelist_index(__n);
00420             // Acquire the lock here with a constructor call.  This
00421             // ensures that it is released in exit or during stack
00422             // unwinding.
00423             _Lock __lock_instance;
00424             _Obj* __restrict__ __result = *__my_free_list;
00425             if (__builtin_expect(__result == 0, 0))
00426               __ret = _S_refill(_S_round_up(__n));
00427             else
00428               {
00429                 *__my_free_list = __result -> _M_free_list_link;
00430                 __ret = __result;
00431               }
00432             if (__builtin_expect(__ret == 0, 0))
00433               __throw_bad_alloc();
00434           }
00435         return __ret;
00436       }
00437 
00438       // __p may not be 0
00439       static void
00440       deallocate(void* __p, size_t __n)
00441       {
00442         if ((__n > (size_t) _MAX_BYTES) || (_S_force_new > 0))
00443           __new_alloc::deallocate(__p, __n);
00444         else
00445           {
00446             _Obj* volatile*  __my_free_list = _S_free_list
00447               + _S_freelist_index(__n);
00448             _Obj* __q = (_Obj*)__p;
00449
00450             // Acquire the lock here with a constructor call.  This
00451             // ensures that it is released in exit or during stack
00452             // unwinding.
00453             _Lock __lock_instance;
00454             __q -> _M_free_list_link = *__my_free_list;
00455             *__my_free_list = __q;
00456           }
00457       }
00458 
00459 #ifdef _GLIBCPP_DEPRECATED
00460       static void*
00461       reallocate(void* __p, size_t __old_sz, size_t __new_sz);
00462 #endif
00463     };
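
  // A minimal sketch of using the pooling allocator directly (illustrative;
  // __alloc is the typedef defined further down in this file, and setting
  // the GLIBCPP_FORCE_NEW environment variable routes every request to
  // new/delete instead of the free lists):
  //
  //   #include <memory>
  //
  //   int main()
  //   {
  //     void* __small = std::__alloc::allocate(24);    // served from a
  //     std::__alloc::deallocate(__small, 24);         // 24-byte free list
  //
  //     void* __big = std::__alloc::allocate(4096);    // > 128 bytes: goes
  //     std::__alloc::deallocate(__big, 4096);         // straight to new
  //   }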
00464 
00465   template<bool __threads, int __inst> _Atomic_word
00466   __default_alloc_template<__threads, __inst>::_S_force_new = 0;
00467 
00468   template<bool __threads, int __inst>
00469     inline bool
00470     operator==(const __default_alloc_template<__threads,__inst>&,
00471                const __default_alloc_template<__threads,__inst>&)
00472     { return true; }
00473 
00474   template<bool __threads, int __inst>
00475     inline bool
00476     operator!=(const __default_alloc_template<__threads,__inst>&,
00477                const __default_alloc_template<__threads,__inst>&)
00478     { return false; }
00479 
00480 
00481   // We allocate memory in large chunks in order to avoid fragmenting the
00482   // heap too much.  We assume that __size is properly aligned.  We hold
00483   // the allocation lock.
00484   template<bool __threads, int __inst>
00485     char*
00486     __default_alloc_template<__threads, __inst>::
00487     _S_chunk_alloc(size_t __size, int& __nobjs)
00488     {
00489       char* __result;
00490       size_t __total_bytes = __size * __nobjs;
00491       size_t __bytes_left = _S_end_free - _S_start_free;
00492 
00493       if (__bytes_left >= __total_bytes)
00494         {
00495           __result = _S_start_free;
00496           _S_start_free += __total_bytes;
00497           return __result ;
00498         }
00499       else if (__bytes_left >= __size)
00500         {
00501           __nobjs = (int)(__bytes_left/__size);
00502           __total_bytes = __size * __nobjs;
00503           __result = _S_start_free;
00504           _S_start_free += __total_bytes;
00505           return __result;
00506         }
00507       else
00508         {
00509           size_t __bytes_to_get =
00510             2 * __total_bytes + _S_round_up(_S_heap_size >> 4);
00511           // Try to make use of the left-over piece.
00512           if (__bytes_left > 0)
00513             {
00514               _Obj* volatile* __my_free_list =
00515                 _S_free_list + _S_freelist_index(__bytes_left);
00516 
00517               ((_Obj*)_S_start_free) -> _M_free_list_link = *__my_free_list;
00518               *__my_free_list = (_Obj*)_S_start_free;
00519             }
00520           _S_start_free = (char*) __new_alloc::allocate(__bytes_to_get);
00521           if (_S_start_free == 0)
00522             {
00523               size_t __i;
00524               _Obj* volatile* __my_free_list;
00525               _Obj* __p;
00526               // Try to make do with what we have.  That can't hurt.  We
00527               // do not try smaller requests, since that tends to result
00528               // in disaster on multi-process machines.
00529               __i = __size;
00530               for (; __i <= (size_t) _MAX_BYTES; __i += (size_t) _ALIGN)
00531                 {
00532                   __my_free_list = _S_free_list + _S_freelist_index(__i);
00533                   __p = *__my_free_list;
00534                   if (__p != 0)
00535                     {
00536                       *__my_free_list = __p -> _M_free_list_link;
00537                       _S_start_free = (char*)__p;
00538                       _S_end_free = _S_start_free + __i;
00539                       return _S_chunk_alloc(__size, __nobjs);
00540                       // Any leftover piece will eventually make it to the
00541                       // right free list.
00542                     }
00543                 }
00544               _S_end_free = 0;        // In case of exception.
00545               _S_start_free = (char*)__new_alloc::allocate(__bytes_to_get);
00546               // This should either throw an exception or remedy the situation.
00547               // Thus we assume it succeeded.
00548             }
00549           _S_heap_size += __bytes_to_get;
00550           _S_end_free = _S_start_free + __bytes_to_get;
00551           return _S_chunk_alloc(__size, __nobjs);
00552         }
00553     }
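
  // Worked example (illustrative): with an empty pool (_S_heap_size == 0),
  // the first request for 20 objects of 16 bytes asks operator new for
  // 2 * (16 * 20) + _S_round_up(0 >> 4) == 640 bytes.  320 of those bytes
  // satisfy the current refill and the other 320 remain between
  // _S_start_free and _S_end_free for the next one; as _S_heap_size grows,
  // the _S_heap_size >> 4 term makes each new chunk proportionally larger.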
00554 
00555 
00556   // Returns an object of size __n, and optionally adds entries to the
00557   // size-__n free list.  We assume that __n is properly aligned.  We
00558   // hold the allocation lock.
00559   template<bool __threads, int __inst>
00560     void*
00561     __default_alloc_template<__threads, __inst>::_S_refill(size_t __n)
00562     {
00563       int __nobjs = 20;
00564       char* __chunk = _S_chunk_alloc(__n, __nobjs);
00565       _Obj* volatile* __my_free_list;
00566       _Obj* __result;
00567       _Obj* __current_obj;
00568       _Obj* __next_obj;
00569       int __i;
00570 
00571       if (1 == __nobjs)
00572         return __chunk;
00573       __my_free_list = _S_free_list + _S_freelist_index(__n);
00574 
00575       // Build free list in chunk.
00576       __result = (_Obj*)__chunk;
00577       *__my_free_list = __next_obj = (_Obj*)(__chunk + __n);
00578       for (__i = 1; ; __i++)
00579         {
00580           __current_obj = __next_obj;
00581           __next_obj = (_Obj*)((char*)__next_obj + __n);
00582           if (__nobjs - 1 == __i)
00583             {
00584               __current_obj -> _M_free_list_link = 0;
00585               break;
00586             }
00587           else
00588             __current_obj -> _M_free_list_link = __next_obj;
00589         }
00590       return __result;
00591     }
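
  // Worked example (illustrative): _S_refill(16) typically receives a
  // 20 * 16 byte chunk from _S_chunk_alloc, hands the first 16-byte slot
  // back to the caller, and threads the remaining 19 slots onto
  // _S_free_list[1] by storing in each slot a pointer to the next; the
  // _Obj union overlays that link on the otherwise unused client bytes.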
00592 
00593 
00594 #ifdef _GLIBCPP_DEPRECATED
00595   template<bool threads, int inst>
00596     void*
00597     __default_alloc_template<threads, inst>::
00598     reallocate(void* __p, size_t __old_sz, size_t __new_sz)
00599     {
00600       void* __result;
00601       size_t __copy_sz;
00602 
00603       if (__old_sz > (size_t) _MAX_BYTES && __new_sz > (size_t) _MAX_BYTES)
00604         return(realloc(__p, __new_sz));
00605       if (_S_round_up(__old_sz) == _S_round_up(__new_sz))
00606         return(__p);
00607       __result = allocate(__new_sz);
00608       __copy_sz = __new_sz > __old_sz? __old_sz : __new_sz;
00609       memcpy(__result, __p, __copy_sz);
00610       deallocate(__p, __old_sz);
00611       return __result;
00612     }
00613 #endif
00614 
00615   template<bool __threads, int __inst>
00616     _STL_mutex_lock
00617     __default_alloc_template<__threads,__inst>::_S_node_allocator_lock
00618     __STL_MUTEX_INITIALIZER;
00619 
00620   template<bool __threads, int __inst>
00621     char* __default_alloc_template<__threads,__inst>::_S_start_free = 0;
00622 
00623   template<bool __threads, int __inst>
00624     char* __default_alloc_template<__threads,__inst>::_S_end_free = 0;
00625 
00626   template<bool __threads, int __inst>
00627     size_t __default_alloc_template<__threads,__inst>::_S_heap_size = 0;
00628 
00629   template<bool __threads, int __inst>
00630     typename __default_alloc_template<__threads,__inst>::_Obj* volatile
00631     __default_alloc_template<__threads,__inst>::_S_free_list[_NFREELISTS];
00632 
00633   typedef __default_alloc_template<true,0>    __alloc;
00634   typedef __default_alloc_template<false,0>   __single_client_alloc;
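
  // A sketch of combining the typed wrapper with the non-locking pool for
  // single-threaded code (illustrative; the library's containers go
  // through _Alloc_traits below rather than doing this by hand):
  //
  //   #include <memory>
  //
  //   int main()
  //   {
  //     typedef std::__simple_alloc<double, std::__single_client_alloc> _Da;
  //     double* __d = _Da::allocate(8);   // 64 bytes, no mutex traffic
  //     _Da::deallocate(__d, 8);
  //   }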
00635 
00636 
00651   template<typename _Tp>
00652     class allocator
00653     {
00654       typedef __alloc _Alloc;          // The underlying allocator.
00655     public:
00656       typedef size_t     size_type;
00657       typedef ptrdiff_t  difference_type;
00658       typedef _Tp*       pointer;
00659       typedef const _Tp* const_pointer;
00660       typedef _Tp&       reference;
00661       typedef const _Tp& const_reference;
00662       typedef _Tp        value_type;
00663 
00664       template<typename _Tp1>
00665         struct rebind
00666         { typedef allocator<_Tp1> other; };
00667 
00668       allocator() throw() {}
00669       allocator(const allocator&) throw() {}
00670       template<typename _Tp1>
00671         allocator(const allocator<_Tp1>&) throw() {}
00672       ~allocator() throw() {}
00673 
00674       pointer
00675       address(reference __x) const { return &__x; }
00676 
00677       const_pointer
00678       address(const_reference __x) const { return &__x; }
00679 
00680       // NB: __n is permitted to be 0.  The C++ standard says nothing
00681       // about what the return value is when __n == 0.
00682       _Tp*
00683       allocate(size_type __n, const void* = 0)
00684       {
00685         _Tp* __ret = 0;
00686         if (__n)
00687           {
00688             if (__n <= this->max_size())
00689               __ret = static_cast<_Tp*>(_Alloc::allocate(__n * sizeof(_Tp)));
00690             else
00691               __throw_bad_alloc();
00692           }
00693         return __ret;
00694       }
00695 
00696       // __p is not permitted to be a null pointer.
00697       void
00698       deallocate(pointer __p, size_type __n)
00699       { _Alloc::deallocate(__p, __n * sizeof(_Tp)); }
00700 
00701       size_type
00702       max_size() const throw() { return size_t(-1) / sizeof(_Tp); }
00703 
00704       void construct(pointer __p, const _Tp& __val) { new(__p) _Tp(__val); }
00705       void destroy(pointer __p) { __p->~_Tp(); }
00706     };
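
  // A minimal sketch of the standard allocator interface (this part is
  // portable C++98, not specific to this implementation):
  //
  //   #include <memory>
  //   #include <string>
  //
  //   int main()
  //   {
  //     std::allocator<std::string> __a;
  //     std::string* __p = __a.allocate(3);       // raw storage for 3 strings
  //     for (int __i = 0; __i < 3; ++__i)
  //       __a.construct(__p + __i, std::string("hi"));  // placement-new
  //     for (int __i = 0; __i < 3; ++__i)
  //       __a.destroy(__p + __i);                 // explicit destructor call
  //     __a.deallocate(__p, 3);
  //   }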
00707 
00708   template<>
00709     class allocator<void>
00710     {
00711     public:
00712       typedef size_t      size_type;
00713       typedef ptrdiff_t   difference_type;
00714       typedef void*       pointer;
00715       typedef const void* const_pointer;
00716       typedef void        value_type;
00717 
00718       template<typename _Tp1>
00719         struct rebind
00720         { typedef allocator<_Tp1> other; };
00721     };
00722 
00723 
00724   template<typename _T1, typename _T2>
00725     inline bool
00726     operator==(const allocator<_T1>&, const allocator<_T2>&)
00727     { return true; }
00728 
00729   template<typename _T1, typename _T2>
00730     inline bool
00731     operator!=(const allocator<_T1>&, const allocator<_T2>&)
00732     { return false; }
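
  // A sketch of rebinding, which is how a node-based container obtains an
  // allocator for its internal node type (illustrative; _My_node is a
  // made-up type):
  //
  //   #include <memory>
  //
  //   struct _My_node { int _M_data; _My_node* _M_next; };
  //
  //   int main()
  //   {
  //     typedef std::allocator<int>::rebind<_My_node>::other _Node_alloc;
  //     _Node_alloc __na;
  //     _My_node* __n = __na.allocate(1);
  //     __na.deallocate(__n, 1);
  //   }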
00733 
00734 
00747   template<typename _Tp, typename _Alloc>
00748     struct __allocator
00749     {
00750       _Alloc __underlying_alloc;
00751       
00752       typedef size_t    size_type;
00753       typedef ptrdiff_t difference_type;
00754       typedef _Tp*       pointer;
00755       typedef const _Tp* const_pointer;
00756       typedef _Tp&       reference;
00757       typedef const _Tp& const_reference;
00758       typedef _Tp        value_type;
00759 
00760       template<typename _Tp1>
00761         struct rebind
00762         { typedef __allocator<_Tp1, _Alloc> other; };
00763 
00764       __allocator() throw() {}
00765       __allocator(const __allocator& __a) throw()
00766       : __underlying_alloc(__a.__underlying_alloc) {}
00767 
00768       template<typename _Tp1>
00769         __allocator(const __allocator<_Tp1, _Alloc>& __a) throw()
00770         : __underlying_alloc(__a.__underlying_alloc) {}
00771 
00772       ~__allocator() throw() {}
00773 
00774       pointer
00775       address(reference __x) const { return &__x; }
00776 
00777       const_pointer
00778       address(const_reference __x) const { return &__x; }
00779 
00780       // NB: __n is permitted to be 0.  The C++ standard says nothing
00781       // about what the return value is when __n == 0.
00782       _Tp*
00783       allocate(size_type __n, const void* = 0)
00784       {
00785         _Tp* __ret = 0;
00786         if (__n)
00787           __ret = static_cast<_Tp*>(_Alloc::allocate(__n * sizeof(_Tp)));
00788         return __ret;
00789       }
00790 
00791       // __p is not permitted to be a null pointer.
00792       void
00793       deallocate(pointer __p, size_type __n)
00794       { __underlying_alloc.deallocate(__p, __n * sizeof(_Tp)); }
00795       
00796       size_type
00797       max_size() const throw() { return size_t(-1) / sizeof(_Tp); }
00798       
00799       void
00800       construct(pointer __p, const _Tp& __val) { new(__p) _Tp(__val); }
00801       
00802       void
00803       destroy(pointer __p) { __p->~_Tp(); }
00804     };
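
  // A sketch of the adaptor, which gives any SGI-style allocator the
  // standard interface (illustrative):
  //
  //   #include <memory>
  //
  //   int main()
  //   {
  //     std::__allocator<int, std::__malloc_alloc_template<0> > __a;
  //     int* __p = __a.allocate(4);
  //     __a.deallocate(__p, 4);
  //   }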
00805 
00806   template<typename _Alloc>
00807     struct __allocator<void, _Alloc>
00808     {
00809       typedef size_t      size_type;
00810       typedef ptrdiff_t   difference_type;
00811       typedef void*       pointer;
00812       typedef const void* const_pointer;
00813       typedef void        value_type;
00814 
00815       template<typename _Tp1>
00816         struct rebind
00817         { typedef __allocator<_Tp1, _Alloc> other; };
00818     };
00819 
00820   template<typename _Tp, typename _Alloc>
00821     inline bool
00822     operator==(const __allocator<_Tp,_Alloc>& __a1,
00823                const __allocator<_Tp,_Alloc>& __a2)
00824     { return __a1.__underlying_alloc == __a2.__underlying_alloc; }
00825 
00826   template<typename _Tp, typename _Alloc>
00827     inline bool
00828     operator!=(const __allocator<_Tp, _Alloc>& __a1,
00829                const __allocator<_Tp, _Alloc>& __a2)
00830     { return __a1.__underlying_alloc != __a2.__underlying_alloc; }
00831 
00832 
00834 
00838   template<int inst>
00839     inline bool
00840     operator==(const __malloc_alloc_template<inst>&,
00841                const __malloc_alloc_template<inst>&)
00842     { return true; }
00843 
00844   template<int __inst>
00845     inline bool
00846     operator!=(const __malloc_alloc_template<__inst>&,
00847                const __malloc_alloc_template<__inst>&)
00848     { return false; }
00849 
00850   template<typename _Alloc>
00851     inline bool
00852     operator==(const __debug_alloc<_Alloc>&, const __debug_alloc<_Alloc>&)
00853     { return true; }
00854 
00855   template<typename _Alloc>
00856     inline bool
00857     operator!=(const __debug_alloc<_Alloc>&, const __debug_alloc<_Alloc>&)
00858     { return false; }
00860 
00861 
00899   // The fully general version.
00900   template<typename _Tp, typename _Allocator>
00901     struct _Alloc_traits
00902     {
00903       static const bool _S_instanceless = false;
00904       typedef typename _Allocator::template rebind<_Tp>::other allocator_type;
00905     };
00906 
00907   template<typename _Tp, typename _Allocator>
00908     const bool _Alloc_traits<_Tp, _Allocator>::_S_instanceless;
00909 
00911   template<typename _Tp, typename _Tp1>
00912     struct _Alloc_traits<_Tp, allocator<_Tp1> >
00913     {
00914       static const bool _S_instanceless = true;
00915       typedef __simple_alloc<_Tp, __alloc> _Alloc_type;
00916       typedef allocator<_Tp> allocator_type;
00917     };
00919 
00921 
00922   template<typename _Tp, int __inst>
00923     struct _Alloc_traits<_Tp, __malloc_alloc_template<__inst> >
00924     {
00925       static const bool _S_instanceless = true;
00926       typedef __simple_alloc<_Tp, __malloc_alloc_template<__inst> > _Alloc_type;
00927       typedef __allocator<_Tp, __malloc_alloc_template<__inst> > allocator_type;
00928     };
00929 
00930   template<typename _Tp, bool __threads, int __inst>
00931     struct _Alloc_traits<_Tp, __default_alloc_template<__threads, __inst> >
00932     {
00933       static const bool _S_instanceless = true;
00934       typedef __simple_alloc<_Tp, __default_alloc_template<__threads, __inst> >
00935       _Alloc_type;
00936       typedef __allocator<_Tp, __default_alloc_template<__threads, __inst> >
00937       allocator_type;
00938     };
00939 
00940   template<typename _Tp, typename _Alloc>
00941     struct _Alloc_traits<_Tp, __debug_alloc<_Alloc> >
00942     {
00943       static const bool _S_instanceless = true;
00944       typedef __simple_alloc<_Tp, __debug_alloc<_Alloc> > _Alloc_type;
00945       typedef __allocator<_Tp, __debug_alloc<_Alloc> > allocator_type;
00946     };
00948 
00950 
00951 
00952   template<typename _Tp, typename _Tp1, int __inst>
00953     struct _Alloc_traits<_Tp,
00954                          __allocator<_Tp1, __malloc_alloc_template<__inst> > >
00955     {
00956       static const bool _S_instanceless = true;
00957       typedef __simple_alloc<_Tp, __malloc_alloc_template<__inst> > _Alloc_type;
00958       typedef __allocator<_Tp, __malloc_alloc_template<__inst> > allocator_type;
00959     };
00960 
00961   template<typename _Tp, typename _Tp1, bool __thr, int __inst>
00962     struct _Alloc_traits<_Tp, __allocator<_Tp1, __default_alloc_template<__thr, __inst> > >
00963     {
00964       static const bool _S_instanceless = true;
00965       typedef __simple_alloc<_Tp, __default_alloc_template<__thr,__inst> >
00966       _Alloc_type;
00967       typedef __allocator<_Tp, __default_alloc_template<__thr,__inst> >
00968       allocator_type;
00969     };
00970 
00971   template<typename _Tp, typename _Tp1, typename _Alloc>
00972     struct _Alloc_traits<_Tp, __allocator<_Tp1, __debug_alloc<_Alloc> > >
00973     {
00974       static const bool _S_instanceless = true;
00975       typedef __simple_alloc<_Tp, __debug_alloc<_Alloc> > _Alloc_type;
00976       typedef __allocator<_Tp, __debug_alloc<_Alloc> > allocator_type;
00977     };
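
  // A sketch of how these traits are consumed (illustrative; _My_container
  // is a made-up class, and _Alloc_type is only available through the
  // "instanceless" specializations above):
  //
  //   #include <memory>
  //
  //   template<typename _Tp, typename _Alloc = std::allocator<_Tp> >
  //   struct _My_container
  //   {
  //     typedef std::_Alloc_traits<_Tp, _Alloc>    _Traits;
  //     typedef typename _Traits::allocator_type   allocator_type;
  //     typedef typename _Traits::_Alloc_type      _Alloc_type;
  //
  //     _Tp*
  //     _M_get_node()
  //     { return _Alloc_type::allocate(1); }
  //
  //     void
  //     _M_put_node(_Tp* __p)
  //     { _Alloc_type::deallocate(__p, 1); }
  //   };
  //
  //   int main()
  //   {
  //     _My_container<int> __c;
  //     int* __p = __c._M_get_node();
  //     __c._M_put_node(__p);
  //   }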
00979 
00980   // Inhibit implicit instantiations for required instantiations,
00981   // which are defined via explicit instantiations elsewhere.
00982   // NB: This syntax is a GNU extension.
00983   extern template class allocator<char>;
00984   extern template class allocator<wchar_t>;
00985   extern template class __default_alloc_template<true,0>;
00986 } // namespace std
00987 
00988 #endif
