summaryrefslogtreecommitdiffstats
path: root/vespamalloc/src/vespamalloc/malloc/threadpool.h
blob: 45f6a0aef6d9d1223be2c86a28da25cf171d0294 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
// Copyright 2016 Yahoo Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
#pragma once

#include <atomic>
#include <vespamalloc/malloc/common.h>
#include <vespamalloc/malloc/allocchunk.h>
#include <vespamalloc/malloc/globalpool.h>

namespace vespamalloc {

/**
 * Per-thread allocation cache. Each thread owns one ThreadPoolT that serves
 * malloc/free requests from thread-local chunk lists, refilling from (and
 * spilling to) the shared AllocPoolT to keep the fast path lock-free.
 */
template <typename MemBlockPtrT, typename ThreadStatT >
class ThreadPoolT
{
public:
    using ChunkSList = AFList<MemBlockPtrT>;
    using AllocPool  = AllocPoolT<MemBlockPtrT>;
    ThreadPoolT();
    ~ThreadPoolT();
    /** Attach the shared global pool this thread cache refills from. */
    void setPool(AllocPool & pool) {
        _allocPool = & pool;
    }
    void malloc(size_t sz, MemBlockPtrT & mem) __attribute__((noinline));
    void free(MemBlockPtrT mem, SizeClassT sc) __attribute__((noinline));

    /** Dump usage statistics for this thread to @a os at the given verbosity level. */
    void info(FILE * os, size_t level, const DataSegment<MemBlockPtrT> & ds) const __attribute__((noinline));
    /**
     * Indicates if it represents an active thread.
     * @return true if this represents an active thread.
     */
    bool isActive() const;
    /**
     * Indicates if it represents an active thread that actually has done any allocations/deallocations.
     * @return true if this represents an active used thread.
     */
    bool isUsed() const;
    // NOTE(review): narrows the stored ssize_t to int; presumably OS thread ids fit — confirm.
    int osThreadId()       const { return _osThreadId; }
    void quit() { _osThreadId = 0; } // Implicit memory barrier
    void init(int thrId);
    static void setParams(size_t alwayReuseLimit, size_t threadCacheLimit);
    bool grabAvailable();
private:
    bool hasActuallyBeenUsed() const;
    ThreadPoolT(const ThreadPoolT &) = delete;
    ThreadPoolT & operator =(const ThreadPoolT &) = delete;
    unsigned threadId()    const { return _threadId; }
    void setThreadId(unsigned th)   { _threadId = th; }
    /**
     * Pair of chunk lists for one size class: one list to allocate from and
     * one to free into; swap() exchanges the two roles.
     */
    class AllocFree {
    public:
        AllocFree() : _allocFrom(nullptr), _freeTo(nullptr) { }
        // Lazily fetch both chunk lists from the global pool on first use.
        void init(AllocPool & allocPool, SizeClassT sc) {
            if (_allocFrom == nullptr) {
                _allocFrom = allocPool.getFree(sc, 1);
                assert(_allocFrom != nullptr);
                _freeTo = allocPool.getFree(sc, 1);
                assert(_freeTo != nullptr);
            }
        }
        void swap() {
            std::swap(_allocFrom, _freeTo);
        }
        ChunkSList *_allocFrom;
        ChunkSList *_freeTo;
    };
    void mallocHelper(size_t exactSize, SizeClassT sc, AllocFree & af, MemBlockPtrT & mem) __attribute__ ((noinline));
    // Size classes above the limit bypass the thread cache and reuse globally.
    bool alwaysReuse(SizeClassT sc) { return sc > _alwaysReuseSCLimit; }

    AllocPool   * _allocPool;
    AllocFree     _memList[NUM_SIZE_CLASSES];
    ThreadStatT   _stat[NUM_SIZE_CLASSES];
    unsigned      _threadId;
    std::atomic<ssize_t> _osThreadId;
    static SizeClassT _alwaysReuseSCLimit __attribute__((visibility("hidden")));
    static size_t     _threadCacheLimit __attribute__((visibility("hidden")));
};

}