summaryrefslogtreecommitdiffstats
path: root/vespalib
diff options
context:
space:
mode:
authorHenning Baldersheim <balder@yahoo-inc.com>2022-06-02 07:41:41 +0000
committerHenning Baldersheim <balder@yahoo-inc.com>2022-06-02 07:41:41 +0000
commite6f6eb2a393352313cb06f661bed48c2e4fc6cd3 (patch)
treef11fa91460a599398a41c713ad4c2b3f2dce8ba4 /vespalib
parent972d7ac82e62bb51c4e16d5b740eaa5ce45cdfa8 (diff)
If all else is equal the compiler will assume the if branch is more likely than the else branch.
This is based on experience since gcc 2.8 and until today, and is not a definitive truth — just more likely. if (criteria) { most_likely_executed code } else { less_likely_executed code } While a single-branched if is assumed less likely.
Diffstat (limited to 'vespalib')
-rw-r--r--vespalib/src/vespa/vespalib/util/alloc.cpp20
1 file changed, 10 insertions, 10 deletions
diff --git a/vespalib/src/vespa/vespalib/util/alloc.cpp b/vespalib/src/vespa/vespalib/util/alloc.cpp
index 0a83d39ca09..ed620ec0afe 100644
--- a/vespalib/src/vespa/vespalib/util/alloc.cpp
+++ b/vespalib/src/vespa/vespalib/util/alloc.cpp
@@ -454,33 +454,33 @@ AutoAllocator::resize_inplace(PtrAndSize current, size_t newSize) const {
MMapAllocator::PtrAndSize
AutoAllocator::alloc(size_t sz) const {
- if (useMMap(sz)) {
- sz = roundUpToHugePages(sz);
- return MMapAllocator::salloc(sz, nullptr);
- } else {
+ if ( ! useMMap(sz)) {
if (_alignment == 0) {
return HeapAllocator::salloc(sz);
} else {
return AlignedHeapAllocator(_alignment).alloc(sz);
}
+ } else {
+ sz = roundUpToHugePages(sz);
+ return MMapAllocator::salloc(sz, nullptr);
}
}
void
AutoAllocator::free(PtrAndSize alloc) const {
- if (isMMapped(alloc.second)) {
- return MMapAllocator::sfree(alloc);
- } else {
+ if ( ! isMMapped(alloc.second)) {
return HeapAllocator::sfree(alloc);
+ } else {
+ return MMapAllocator::sfree(alloc);
}
}
void
AutoAllocator::free(void * ptr, size_t sz) const {
- if (useMMap(sz)) {
- return MMapAllocator::sfree(PtrAndSize(ptr, roundUpToHugePages(sz)));
- } else {
+ if ( ! useMMap(sz)) {
return HeapAllocator::sfree(PtrAndSize(ptr, sz));
+ } else {
+ return MMapAllocator::sfree(PtrAndSize(ptr, roundUpToHugePages(sz)));
}
}