/OK3568_Linux_fs/kernel/drivers/android/

binder_alloc.c
     61  static size_t binder_alloc_buffer_size(struct binder_alloc *alloc,  in binder_alloc_buffer_size() argument
     64  if (list_is_last(&buffer->entry, &alloc->buffers))  in binder_alloc_buffer_size()
     65  return alloc->buffer + alloc->buffer_size - buffer->user_data;  in binder_alloc_buffer_size()
     69  static void binder_insert_free_buffer(struct binder_alloc *alloc,  in binder_insert_free_buffer() argument
     72  struct rb_node **p = &alloc->free_buffers.rb_node;  in binder_insert_free_buffer()
     80  new_buffer_size = binder_alloc_buffer_size(alloc, new_buffer);  in binder_insert_free_buffer()
     84  alloc->pid, new_buffer_size, new_buffer);  in binder_insert_free_buffer()
     91  buffer_size = binder_alloc_buffer_size(alloc, buffer);  in binder_insert_free_buffer()
     99  rb_insert_color(&new_buffer->rb_node, &alloc->free_buffers);  in binder_insert_free_buffer()
    103  struct binder_alloc *alloc, struct binder_buffer *new_buffer)  in binder_insert_allocated_buffer_locked() argument
    [all …]
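
The binder_alloc_buffer_size() hits above rely on a layout trick that is easy to miss in isolated lines: a buffer's size is never stored, it is the distance to the start of the next buffer in the address-ordered list, or to the end of the mmap'ed region for the last buffer. A minimal userspace sketch of that idea, using a plain sorted array instead of the kernel's list and rb-tree (all names here are illustrative, not binder's):

    #include <stddef.h>

    /* One allocated buffer: only its start address is recorded. */
    struct buf {
        unsigned long start;              /* corresponds to buffer->user_data */
    };

    /* The pool: base address, total size, and buffers sorted by start. */
    struct pool {
        unsigned long base;               /* corresponds to alloc->buffer      */
        size_t        size;               /* corresponds to alloc->buffer_size */
        struct buf   *bufs;               /* sorted by ascending start address */
        size_t        nbufs;
    };

    /* Size of bufs[i]: distance to the next buffer's start, or to the
     * end of the pool when bufs[i] is the last buffer. */
    static size_t buffer_size(const struct pool *p, size_t i)
    {
        if (i + 1 == p->nbufs)
            return p->base + p->size - p->bufs[i].start;
        return p->bufs[i + 1].start - p->bufs[i].start;
    }

The free-buffer rb-tree seen in binder_insert_free_buffer() is then keyed on exactly this derived size, which is what keeps best-fit lookups cheap.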

binder_alloc_selftest.c
     92  static bool check_buffer_pages_allocated(struct binder_alloc *alloc,  in check_buffer_pages_allocated() argument
    103  page_index = (page_addr - alloc->buffer) / PAGE_SIZE;  in check_buffer_pages_allocated()
    104  if (!alloc->pages[page_index].page_ptr ||  in check_buffer_pages_allocated()
    105  !list_empty(&alloc->pages[page_index].lru)) {  in check_buffer_pages_allocated()
    107  alloc->pages[page_index].page_ptr ?  in check_buffer_pages_allocated()
    115  static void binder_selftest_alloc_buf(struct binder_alloc *alloc,  in binder_selftest_alloc_buf() argument
    122  buffers[i] = binder_alloc_new_buf(alloc, sizes[i], 0, 0, 0, 0);  in binder_selftest_alloc_buf()
    124  !check_buffer_pages_allocated(alloc, buffers[i],  in binder_selftest_alloc_buf()
    132  static void binder_selftest_free_buf(struct binder_alloc *alloc,  in binder_selftest_free_buf() argument
    139  binder_alloc_free_buf(alloc, buffers[seq[i]]);  in binder_selftest_free_buf()
    [all …]

binder_alloc.h
     72  struct binder_alloc *alloc;  member
    119  void binder_selftest_alloc(struct binder_alloc *alloc);
    121  static inline void binder_selftest_alloc(struct binder_alloc *alloc) {}  in binder_selftest_alloc() argument
    126  extern struct binder_buffer *binder_alloc_new_buf(struct binder_alloc *alloc,
    132  extern void binder_alloc_init(struct binder_alloc *alloc);
    134  extern void binder_alloc_vma_close(struct binder_alloc *alloc);
    136  binder_alloc_prepare_to_free(struct binder_alloc *alloc,
    138  extern void binder_alloc_free_buf(struct binder_alloc *alloc,
    140  extern int binder_alloc_mmap_handler(struct binder_alloc *alloc,
    142  extern void binder_alloc_deferred_release(struct binder_alloc *alloc);
    [all …]

binder_trace.h
    314  TP_PROTO(struct binder_alloc *alloc, bool allocate,
    316  TP_ARGS(alloc, allocate, start, end),
    324  __entry->proc = alloc->pid;
    326  __entry->offset = start - alloc->buffer;
    335  TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
    336  TP_ARGS(alloc, page_index),
    342  __entry->proc = alloc->pid;
    350  TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
    351  TP_ARGS(alloc, page_index));
    354  TP_PROTO(const struct binder_alloc *alloc, size_t page_index),
    [all …]
/OK3568_Linux_fs/kernel/drivers/infiniband/hw/cxgb4/

id_table.c
     44  u32 c4iw_id_alloc(struct c4iw_id_table *alloc)  in c4iw_id_alloc() argument
     49  spin_lock_irqsave(&alloc->lock, flags);  in c4iw_id_alloc()
     51  obj = find_next_zero_bit(alloc->table, alloc->max, alloc->last);  in c4iw_id_alloc()
     52  if (obj >= alloc->max)  in c4iw_id_alloc()
     53  obj = find_first_zero_bit(alloc->table, alloc->max);  in c4iw_id_alloc()
     55  if (obj < alloc->max) {  in c4iw_id_alloc()
     56  if (alloc->flags & C4IW_ID_TABLE_F_RANDOM)  in c4iw_id_alloc()
     57  alloc->last += prandom_u32() % RANDOM_SKIP;  in c4iw_id_alloc()
     59  alloc->last = obj + 1;  in c4iw_id_alloc()
     60  if (alloc->last >= alloc->max)  in c4iw_id_alloc()
    [all …]
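
The c4iw_id_alloc() fragments above are a textbook round-robin bitmap allocator: scan for a zero bit starting at the previous position, wrap around once, mark the bit, and advance the cursor (optionally by a random amount when C4IW_ID_TABLE_F_RANDOM is set). A rough userspace sketch of the same search-and-wrap pattern, without the driver's spinlock or the random skip (all names are illustrative):

    #include <limits.h>

    #define BITS_PER_LONG (sizeof(unsigned long) * CHAR_BIT)

    struct id_table {
        unsigned long *bits;              /* one bit per ID, 1 = in use */
        unsigned int   max;               /* number of IDs              */
        unsigned int   last;              /* where the next scan starts */
    };

    static int bit_is_set(const unsigned long *bits, unsigned int i)
    {
        return (bits[i / BITS_PER_LONG] >> (i % BITS_PER_LONG)) & 1UL;
    }

    /* Returns a free ID, or -1 when the table is exhausted. */
    static int id_alloc(struct id_table *t)
    {
        unsigned int obj;

        /* Scan forward from the last allocation point... */
        for (obj = t->last; obj < t->max && bit_is_set(t->bits, obj); obj++)
            ;
        /* ...and wrap around to the beginning if nothing was free there. */
        if (obj >= t->max)
            for (obj = 0; obj < t->max && bit_is_set(t->bits, obj); obj++)
                ;
        if (obj >= t->max)
            return -1;

        t->bits[obj / BITS_PER_LONG] |= 1UL << (obj % BITS_PER_LONG);
        t->last = obj + 1;                /* round-robin: resume after this ID */
        if (t->last >= t->max)
            t->last = 0;
        return (int)obj;
    }

The mthca_alloc() hits further down follow the same shape, with an extra rotating "top" prefix folded into the returned ID so that freed IDs are not reused immediately.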
/OK3568_Linux_fs/kernel/drivers/base/arm/dma_buf_test_exporter/

dma-buf-test-exporter.c
     83  struct dma_buf_te_alloc *alloc;  in dma_buf_te_attach() local
     85  alloc = buf->priv;  in dma_buf_te_attach()
     87  if (alloc->fail_attach)  in dma_buf_te_attach()
     95  alloc->nr_attached_devices++;  in dma_buf_te_attach()
    107  struct dma_buf_te_alloc *alloc = buf->priv;  in dma_buf_te_detach() local
    114  alloc->nr_attached_devices--;  in dma_buf_te_detach()
    123  struct dma_buf_te_alloc *alloc;  in dma_buf_te_map() local
    128  alloc = attachment->dmabuf->priv;  in dma_buf_te_map()
    130  if (alloc->fail_map)  in dma_buf_te_map()
    139  if (alloc->nr_pages > SG_MAX_SINGLE_ALLOC)  in dma_buf_te_map()
    [all …]
/OK3568_Linux_fs/kernel/sound/isa/gus/

gus_mem.c
     18  void snd_gf1_mem_lock(struct snd_gf1_mem * alloc, int xup)  in snd_gf1_mem_lock() argument
     21  mutex_lock(&alloc->memory_mutex);  in snd_gf1_mem_lock()
     23  mutex_unlock(&alloc->memory_mutex);  in snd_gf1_mem_lock()
     27  static struct snd_gf1_mem_block *snd_gf1_mem_xalloc(struct snd_gf1_mem * alloc,  in snd_gf1_mem_xalloc() argument
     36  pblock = alloc->first;  in snd_gf1_mem_xalloc()
     42  if (pblock == alloc->first)  in snd_gf1_mem_xalloc()
     43  alloc->first = nblock;  in snd_gf1_mem_xalloc()
     46  mutex_unlock(&alloc->memory_mutex);  in snd_gf1_mem_xalloc()
     52  if (alloc->last == NULL) {  in snd_gf1_mem_xalloc()
     54  alloc->first = alloc->last = nblock;  in snd_gf1_mem_xalloc()
    [all …]
/OK3568_Linux_fs/external/gstreamer-rockchip/gst/rkximage/

gstkmsallocator.c
    101  check_fd (GstKMSAllocator * alloc)  in check_fd() argument
    103  return alloc->priv->fd > -1;  in check_fd()
    257  GstKMSAllocator *alloc;  in gst_kms_allocator_free() local
    260  alloc = GST_KMS_ALLOCATOR (allocator);  in gst_kms_allocator_free()
    263  gst_kms_allocator_memory_reset (alloc, kmsmem);  in gst_kms_allocator_free()
    271  GstKMSAllocator *alloc;  in gst_kms_allocator_set_property() local
    273  alloc = GST_KMS_ALLOCATOR (object);  in gst_kms_allocator_set_property()
    279  alloc->priv->fd = dup (fd);  in gst_kms_allocator_set_property()
    292  GstKMSAllocator *alloc;  in gst_kms_allocator_get_property() local
    294  alloc = GST_KMS_ALLOCATOR (object);  in gst_kms_allocator_get_property()
    [all …]
/OK3568_Linux_fs/kernel/drivers/infiniband/hw/mthca/

mthca_allocator.c
     40  u32 mthca_alloc(struct mthca_alloc *alloc)  in mthca_alloc() argument
     45  spin_lock_irqsave(&alloc->lock, flags);  in mthca_alloc()
     47  obj = find_next_zero_bit(alloc->table, alloc->max, alloc->last);  in mthca_alloc()
     48  if (obj >= alloc->max) {  in mthca_alloc()
     49  alloc->top = (alloc->top + alloc->max) & alloc->mask;  in mthca_alloc()
     50  obj = find_first_zero_bit(alloc->table, alloc->max);  in mthca_alloc()
     53  if (obj < alloc->max) {  in mthca_alloc()
     54  set_bit(obj, alloc->table);  in mthca_alloc()
     55  obj |= alloc->top;  in mthca_alloc()
     59  spin_unlock_irqrestore(&alloc->lock, flags);  in mthca_alloc()
    [all …]
/OK3568_Linux_fs/kernel/drivers/gpu/arm/mali400/mali/linux/

mali_memory_ump.c
     22  mali_mem_allocation *alloc;  in mali_mem_ump_map() local
     34  alloc = mem_backend->mali_allocation;  in mali_mem_ump_map()
     35  MALI_DEBUG_ASSERT_POINTER(alloc);  in mali_mem_ump_map()
     37  session = alloc->session;  in mali_mem_ump_map()
     63  err = mali_mem_mali_map_prepare(alloc);  in mali_mem_ump_map()
     73  u32 virt = alloc->mali_vma_node.vm_node.start + offset;  in mali_mem_ump_map()
     83  if (alloc->flags & _MALI_MAP_EXTERNAL_MAP_GUARD_PAGE) {  in mali_mem_ump_map()
     84  u32 virt = alloc->mali_vma_node.vm_node.start + offset;  in mali_mem_ump_map()
     98  static void mali_mem_ump_unmap(mali_mem_allocation *alloc)  in mali_mem_ump_unmap() argument
    101  MALI_DEBUG_ASSERT_POINTER(alloc);  in mali_mem_ump_unmap()
    [all …]

mali_memory_external.c
     20  mali_mem_allocation *alloc;  in mali_mem_unbind_ext_buf() local
     23  alloc = mem_backend->mali_allocation;  in mali_mem_unbind_ext_buf()
     24  MALI_DEBUG_ASSERT_POINTER(alloc);  in mali_mem_unbind_ext_buf()
     27  session = alloc->session;  in mali_mem_unbind_ext_buf()
     30  mali_mem_mali_map_free(session, alloc->psize, alloc->mali_vma_node.vm_node.start,  in mali_mem_unbind_ext_buf()
     31  alloc->flags);  in mali_mem_unbind_ext_buf()
     35  _mali_osk_errcode_t mali_mem_bind_ext_buf(mali_mem_allocation *alloc,  in mali_mem_bind_ext_buf() argument
     44  MALI_DEBUG_ASSERT_POINTER(alloc);  in mali_mem_bind_ext_buf()
     45  size = alloc->psize;  in mali_mem_bind_ext_buf()
     46  session = (struct mali_session_data *)(uintptr_t)alloc->session;  in mali_mem_bind_ext_buf()
    [all …]

mali_memory_dma_buf.c
     37  mali_mem_allocation *alloc;  in mali_dma_buf_map() local
     48  alloc = mem_backend->mali_allocation;  in mali_dma_buf_map()
     49  MALI_DEBUG_ASSERT_POINTER(alloc);  in mali_dma_buf_map()
     54  session = alloc->session;  in mali_dma_buf_map()
     58  virt = alloc->mali_vma_node.vm_node.start;  in mali_dma_buf_map()
     59  flags = alloc->flags;  in mali_dma_buf_map()
     79  err = mali_mem_mali_map_prepare(alloc);  in mali_dma_buf_map()
    123  static void mali_dma_buf_unmap(mali_mem_allocation *alloc, struct mali_dma_buf_attachment *mem)  in mali_dma_buf_unmap() argument
    125  MALI_DEBUG_ASSERT_POINTER(alloc);  in mali_dma_buf_unmap()
    129  MALI_DEBUG_ASSERT_POINTER(alloc->session);  in mali_dma_buf_unmap()
    [all …]

mali_memory_cow.c
    292  mali_mem_allocation *alloc = NULL;  in mali_memory_cow_modify_range() local
    305  alloc = backend->mali_allocation;  in mali_memory_cow_modify_range()
    306  MALI_DEBUG_ASSERT_POINTER(alloc);  in mali_memory_cow_modify_range()
    308  session = alloc->session;  in mali_memory_cow_modify_range()
    375  MALI_DEBUG_ASSERT(MALI_MEM_COW == alloc->type);  in mali_memory_cow_modify_range()
    378  if (NULL != alloc->cpu_mapping.vma) {  in mali_memory_cow_modify_range()
    379  MALI_DEBUG_ASSERT(0 != alloc->backend_handle);  in mali_memory_cow_modify_range()
    380  MALI_DEBUG_ASSERT(NULL != alloc->cpu_mapping.vma);  in mali_memory_cow_modify_range()
    381  …MALI_DEBUG_ASSERT(alloc->cpu_mapping.vma->vm_end - alloc->cpu_mapping.vma->vm_start >= range_size);  in mali_memory_cow_modify_range()
    384  zap_vma_ptes(alloc->cpu_mapping.vma, alloc->cpu_mapping.vma->vm_start + range_start, range_size);  in mali_memory_cow_modify_range()
    [all …]
/OK3568_Linux_fs/kernel/fs/ocfs2/

localalloc.c
     37  static u32 ocfs2_local_alloc_count_bits(struct ocfs2_dinode *alloc);
     40  struct ocfs2_dinode *alloc,
     44  static void ocfs2_clear_local_alloc(struct ocfs2_dinode *alloc);
     48  struct ocfs2_dinode *alloc,
    273  struct ocfs2_dinode *alloc = NULL;  in ocfs2_load_local_alloc() local
    307  alloc = (struct ocfs2_dinode *) alloc_bh->b_data;  in ocfs2_load_local_alloc()
    308  la = OCFS2_LOCAL_ALLOC(alloc);  in ocfs2_load_local_alloc()
    310  if (!(le32_to_cpu(alloc->i_flags) &  in ocfs2_load_local_alloc()
    327  num_used = ocfs2_local_alloc_count_bits(alloc);  in ocfs2_load_local_alloc()
    332  || alloc->id1.bitmap1.i_used  in ocfs2_load_local_alloc()
    [all …]
/OK3568_Linux_fs/buildroot/dl/qt5location/git/src/3rdparty/mapbox-gl-native/deps/boost/1.65.1/include/boost/spirit/home/qi/string/detail/

tst.hpp
     34  destruct_node(tst_node* p, Alloc* alloc)  in destruct_node()
     39  alloc->delete_data(p->data);  in destruct_node()
     40  destruct_node(p->lt, alloc);  in destruct_node()
     41  destruct_node(p->eq, alloc);  in destruct_node()
     42  destruct_node(p->gt, alloc);  in destruct_node()
     43  alloc->delete_node(p);  in destruct_node()
     49  clone_node(tst_node* p, Alloc* alloc)  in clone_node()
     53  tst_node* clone = alloc->new_node(p->id);  in clone_node()
     55  clone->data = alloc->new_data(*p->data);  in clone_node()
     56  clone->lt = clone_node(p->lt, alloc);  in clone_node()
    [all …]
/OK3568_Linux_fs/kernel/fs/xfs/libxfs/

xfs_alloc_btree.c
    130  len = rec->alloc.ar_blockcount;  in xfs_allocbt_update_lastrec()
    133  if (be32_to_cpu(rec->alloc.ar_blockcount) <=  in xfs_allocbt_update_lastrec()
    136  len = rec->alloc.ar_blockcount;  in xfs_allocbt_update_lastrec()
    186  key->alloc.ar_startblock = rec->alloc.ar_startblock;  in xfs_allocbt_init_key_from_rec()
    187  key->alloc.ar_blockcount = rec->alloc.ar_blockcount;  in xfs_allocbt_init_key_from_rec()
    197  x = be32_to_cpu(rec->alloc.ar_startblock);  in xfs_bnobt_init_high_key_from_rec()
    198  x += be32_to_cpu(rec->alloc.ar_blockcount) - 1;  in xfs_bnobt_init_high_key_from_rec()
    199  key->alloc.ar_startblock = cpu_to_be32(x);  in xfs_bnobt_init_high_key_from_rec()
    200  key->alloc.ar_blockcount = 0;  in xfs_bnobt_init_high_key_from_rec()
    208  key->alloc.ar_blockcount = rec->alloc.ar_blockcount;  in xfs_cntbt_init_high_key_from_rec()
    [all …]
/OK3568_Linux_fs/kernel/drivers/gpu/arm/midgard/

mali_kbase_mem.h
     69  struct kbase_mem_phy_alloc *alloc;  member
     89  struct kbase_mem_phy_alloc *alloc; /* NULL for special, non-NULL for native */  member
    177  static inline void kbase_mem_phy_alloc_gpu_mapped(struct kbase_mem_phy_alloc *alloc)  in kbase_mem_phy_alloc_gpu_mapped() argument
    179  KBASE_DEBUG_ASSERT(alloc);  in kbase_mem_phy_alloc_gpu_mapped()
    181  if (alloc->type == KBASE_MEM_TYPE_NATIVE)  in kbase_mem_phy_alloc_gpu_mapped()
    182  atomic_inc(&alloc->gpu_mappings);  in kbase_mem_phy_alloc_gpu_mapped()
    185  static inline void kbase_mem_phy_alloc_gpu_unmapped(struct kbase_mem_phy_alloc *alloc)  in kbase_mem_phy_alloc_gpu_unmapped() argument
    187  KBASE_DEBUG_ASSERT(alloc);  in kbase_mem_phy_alloc_gpu_unmapped()
    189  if (alloc->type == KBASE_MEM_TYPE_NATIVE)  in kbase_mem_phy_alloc_gpu_unmapped()
    190  if (0 > atomic_dec_return(&alloc->gpu_mappings)) {  in kbase_mem_phy_alloc_gpu_unmapped()
    [all …]

mali_kbase_mem.c
    906  struct kbase_mem_phy_alloc *alloc;  in kbase_gpu_mmap() local
    908  alloc = reg->gpu_alloc;  in kbase_gpu_mmap()
    909  stride = alloc->imported.alias.stride;  in kbase_gpu_mmap()
    910  KBASE_DEBUG_ASSERT(alloc->imported.alias.aliased);  in kbase_gpu_mmap()
    911  for (i = 0; i < alloc->imported.alias.nents; i++) {  in kbase_gpu_mmap()
    912  if (alloc->imported.alias.aliased[i].alloc) {  in kbase_gpu_mmap()
    915  alloc->imported.alias.aliased[i].alloc->pages + alloc->imported.alias.aliased[i].offset,  in kbase_gpu_mmap()
    916  alloc->imported.alias.aliased[i].length,  in kbase_gpu_mmap()
    921  kbase_mem_phy_alloc_gpu_mapped(alloc->imported.alias.aliased[i].alloc);  in kbase_gpu_mmap()
    926  alloc->imported.alias.aliased[i].length,  in kbase_gpu_mmap()
    [all …]
/OK3568_Linux_fs/kernel/drivers/gpu/arm/bifrost/

mali_kbase_mem.h
     70  struct kbase_mem_phy_alloc *alloc;  member
     89  struct kbase_mem_phy_alloc *alloc; /* NULL for special, non-NULL for native */  member
    320  void kbase_set_phy_alloc_page_status(struct kbase_mem_phy_alloc *alloc,
    323  static inline void kbase_mem_phy_alloc_gpu_mapped(struct kbase_mem_phy_alloc *alloc)  in kbase_mem_phy_alloc_gpu_mapped() argument
    325  KBASE_DEBUG_ASSERT(alloc);  in kbase_mem_phy_alloc_gpu_mapped()
    327  if (alloc->type == KBASE_MEM_TYPE_NATIVE)  in kbase_mem_phy_alloc_gpu_mapped()
    328  atomic_inc(&alloc->gpu_mappings);  in kbase_mem_phy_alloc_gpu_mapped()
    331  static inline void kbase_mem_phy_alloc_gpu_unmapped(struct kbase_mem_phy_alloc *alloc)  in kbase_mem_phy_alloc_gpu_unmapped() argument
    333  KBASE_DEBUG_ASSERT(alloc);  in kbase_mem_phy_alloc_gpu_unmapped()
    335  if (alloc->type == KBASE_MEM_TYPE_NATIVE)  in kbase_mem_phy_alloc_gpu_unmapped()
    [all …]

mali_kbase_mem.c
   1748  struct kbase_mem_phy_alloc *alloc;  in kbase_gpu_mmap() local
   1768  alloc = reg->gpu_alloc;  in kbase_gpu_mmap()
   1769  group_id = alloc->group_id;  in kbase_gpu_mmap()
   1772  u64 const stride = alloc->imported.alias.stride;  in kbase_gpu_mmap()
   1774  KBASE_DEBUG_ASSERT(alloc->imported.alias.aliased);  in kbase_gpu_mmap()
   1775  for (i = 0; i < alloc->imported.alias.nents; i++) {  in kbase_gpu_mmap()
   1776  if (alloc->imported.alias.aliased[i].alloc) {  in kbase_gpu_mmap()
   1779  alloc->imported.alias.aliased[i].alloc->pages +  in kbase_gpu_mmap()
   1780  alloc->imported.alias.aliased[i].offset,  in kbase_gpu_mmap()
   1781  alloc->imported.alias.aliased[i].length,  in kbase_gpu_mmap()
    [all …]

mali_kbase_trace_gpu_mem.c
    167  struct kbase_mem_phy_alloc *alloc)  in kbase_remove_dma_buf_usage() argument
    175  kctx, alloc->imported.umm.dma_buf, &kbdev->dma_buf_root);  in kbase_remove_dma_buf_usage()
    178  kctx, alloc->imported.umm.dma_buf, &kctx->kprcs->dma_buf_root);  in kbase_remove_dma_buf_usage()
    184  kbdev->total_gpu_pages -= alloc->nents;  in kbase_remove_dma_buf_usage()
    187  kctx->kprcs->total_gpu_pages -= alloc->nents;  in kbase_remove_dma_buf_usage()
    197  struct kbase_mem_phy_alloc *alloc)  in kbase_add_dma_buf_usage() argument
    206  kctx, alloc->imported.umm.dma_buf, &kbdev->dma_buf_root);  in kbase_add_dma_buf_usage()
    209  kctx, alloc->imported.umm.dma_buf, &kctx->kprcs->dma_buf_root);  in kbase_add_dma_buf_usage()
    215  kbdev->total_gpu_pages += alloc->nents;  in kbase_add_dma_buf_usage()
    218  kctx->kprcs->total_gpu_pages += alloc->nents;  in kbase_add_dma_buf_usage()
/OK3568_Linux_fs/kernel/tools/perf/util/

strbuf.c
     22  sb->alloc = sb->len = 0;  in strbuf_init()
     31  if (sb->alloc) {  in strbuf_release()
     39  char *res = sb->alloc ? sb->buf : NULL;  in strbuf_detach()
     51  if (nr < sb->alloc)  in strbuf_grow()
     57  if (alloc_nr(sb->alloc) > nr)  in strbuf_grow()
     58  nr = alloc_nr(sb->alloc);  in strbuf_grow()
     64  buf = realloc(sb->alloc ? sb->buf : NULL, nr * sizeof(*buf));  in strbuf_grow()
     69  sb->alloc = nr;  in strbuf_grow()
    106  len = vsnprintf(sb->buf + sb->len, sb->alloc - sb->len, fmt, ap);  in strbuf_addv()
    117  len = vsnprintf(sb->buf + sb->len, sb->alloc - sb->len, fmt, ap_saved);  in strbuf_addv()
    [all …]
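
The strbuf_grow() and strbuf_addv() lines above show the usual pattern for printf-style appends into a growable buffer: format into the spare space first, and if vsnprintf() reports that the output was truncated, grow the buffer and format a second time from a saved va_list. A simplified userspace sketch of that retry pattern (a plain doubling policy stands in for the real alloc_nr() growth, and error handling is kept minimal):

    #include <stdarg.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct sbuf {
        char  *buf;
        size_t len;                       /* bytes currently used      */
        size_t alloc;                     /* bytes currently allocated */
    };

    /* Ensure room for at least 'extra' more bytes plus a terminating NUL. */
    static void sbuf_grow(struct sbuf *sb, size_t extra)
    {
        if (sb->len + extra + 1 <= sb->alloc)
            return;
        sb->alloc = (sb->len + extra + 1) * 2;     /* simple doubling policy    */
        sb->buf = realloc(sb->buf, sb->alloc);     /* real code checks for NULL */
    }

    static void sbuf_addf(struct sbuf *sb, const char *fmt, ...)
    {
        va_list ap, ap_saved;
        int len;

        sbuf_grow(sb, 64);                /* make sure some spare space exists */
        va_start(ap, fmt);
        va_copy(ap_saved, ap);            /* keep a copy in case we must retry */

        len = vsnprintf(sb->buf + sb->len, sb->alloc - sb->len, fmt, ap);
        if (len >= 0) {
            if ((size_t)len >= sb->alloc - sb->len) {
                /* Truncated: grow to the reported size and format again. */
                sbuf_grow(sb, (size_t)len);
                len = vsnprintf(sb->buf + sb->len, sb->alloc - sb->len, fmt, ap_saved);
            }
            sb->len += (size_t)len;
        }
        va_end(ap);
        va_end(ap_saved);
    }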
/OK3568_Linux_fs/kernel/net/core/

page_pool.c
    134  pool->alloc.cache[pool->alloc.count++] = page;  in page_pool_refill_alloc_cache()
    145  } while (pool->alloc.count < PP_ALLOC_CACHE_REFILL);  in page_pool_refill_alloc_cache()
    148  if (likely(pool->alloc.count > 0))  in page_pool_refill_alloc_cache()
    149  page = pool->alloc.cache[--pool->alloc.count];  in page_pool_refill_alloc_cache()
    161  if (likely(pool->alloc.count)) {  in __page_pool_get_cached()
    163  page = pool->alloc.cache[--pool->alloc.count];  in __page_pool_get_cached()
    345  if (unlikely(pool->alloc.count == PP_ALLOC_CACHE_SIZE))  in page_pool_recycle_in_cache()
    349  pool->alloc.cache[pool->alloc.count++] = page;  in page_pool_recycle_in_cache()
    452  while (pool->alloc.count) {  in page_pool_empty_alloc_cache_once()
    453  page = pool->alloc.cache[--pool->alloc.count];  in page_pool_empty_alloc_cache_once()
    [all …]
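
The page_pool.c matches outline the pool's fast path: a small per-pool array (alloc.cache) used as a stack that is popped on allocation, refilled from the ptr_ring when empty, pushed on recycle unless it is full, and drained on teardown. A stripped-down sketch of that caching shape, with malloc()/free() standing in for the ring and the page allocator (the names and cache size below are illustrative, not the kernel's):

    #include <stdlib.h>

    #define CACHE_SIZE 128

    struct pool {
        void    *cache[CACHE_SIZE];
        unsigned count;                           /* number of cached objects */
    };

    static void *slow_alloc(void) { return malloc(4096); }
    static void  slow_free(void *p) { free(p); }

    static void *pool_alloc(struct pool *pool)
    {
        if (pool->count)                          /* fast path: pop from cache */
            return pool->cache[--pool->count];
        return slow_alloc();                      /* slow path: real allocation */
    }

    static void pool_free(struct pool *pool, void *obj)
    {
        if (pool->count < CACHE_SIZE) {           /* fast path: push to cache */
            pool->cache[pool->count++] = obj;
            return;
        }
        slow_free(obj);                           /* cache full: slow path */
    }

    static void pool_destroy(struct pool *pool)
    {
        while (pool->count)                       /* drain anything still cached */
            slow_free(pool->cache[--pool->count]);
    }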
/OK3568_Linux_fs/buildroot/dl/qt5location/git/src/3rdparty/mapbox-gl-native/deps/boost/1.65.1/include/boost/container/

vector.hpp
    421  allocator_type &this_alloc = this->alloc();  in vector_alloc_holder()
    422  allocator_type &x_alloc = holder.alloc();  in vector_alloc_holder()
    425  this->alloc().deallocate(this->m_start, this->m_capacity);  in vector_alloc_holder()
    461  this->alloc().deallocate(this->m_start, this->m_capacity);  in ~vector_alloc_holder()
    493  >::get( allocator_traits_type::max_size(this->alloc())  in next_capacity()
    517  BOOST_CONTAINER_FORCEINLINE Allocator &alloc() BOOST_NOEXCEPT_OR_NOTHROW  in alloc() function
    520  BOOST_CONTAINER_FORCEINLINE const Allocator &alloc() const BOOST_NOEXCEPT_OR_NOTHROW  in alloc() function
    549  … pointer const p = allocator_traits_type::allocate(this->alloc(), prefer_in_recvd_out_size, reuse);  in priv_allocation_command()
    559  return this->alloc().allocation_command(command, limit_size, prefer_in_recvd_out_size, reuse);  in priv_allocation_command()
    616  …(this->alloc(), boost::movelib::to_raw_pointer(holder.start()), m_size, boost::movelib::to_raw_poi…  in vector_alloc_holder()
    [all …]
/OK3568_Linux_fs/kernel/tools/lib/subcmd/

subcmd-util.h
     39  #define ALLOC_GROW(x, nr, alloc) \  argument
     41  if ((nr) > alloc) { \
     42  if (alloc_nr(alloc) < (nr)) \
     43  alloc = (nr); \
     45  alloc = alloc_nr(alloc); \
     46  x = xrealloc((x), alloc * sizeof(*(x))); \
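
The ALLOC_GROW() matches above are the generic grow-before-append idiom used across the tools/ code: nr counts used slots, alloc counts capacity, and the array is reallocated with a roughly 1.5x growth policy whenever one more slot is needed. A self-contained usage sketch, with plain realloc() standing in for xrealloc() (which aborts on failure in the real helper):

    #include <stdio.h>
    #include <stdlib.h>

    #define alloc_nr(x) (((x) + 16) * 3 / 2)       /* ~1.5x growth policy */

    /* Same shape as the subcmd-util.h macro; a real caller should check
     * the realloc() result for NULL. */
    #define ALLOC_GROW(x, nr, alloc)                           \
        do {                                                   \
            if ((nr) > (alloc)) {                              \
                if (alloc_nr(alloc) < (nr))                    \
                    (alloc) = (nr);                            \
                else                                           \
                    (alloc) = alloc_nr(alloc);                 \
                (x) = realloc((x), (alloc) * sizeof(*(x)));    \
            }                                                  \
        } while (0)

    int main(void)
    {
        int *vals = NULL;
        size_t nr = 0, alloc = 0;
        size_t i;

        for (i = 0; i < 100; i++) {
            ALLOC_GROW(vals, nr + 1, alloc);   /* ensure room for one more slot */
            vals[nr++] = (int)(i * i);
        }
        printf("%zu values stored, %zu slots allocated\n", nr, alloc);
        free(vals);
        return 0;
    }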