#include <array>
#include <atomic>
#include <cassert>
#include <cstddef>   // std::size_t
#include <iostream>
#include <memory>
#include <thread>    // used only by the usage sketch at the bottom of the file
#include <vector>

// Compiler hints (GCC/Clang builtins): force or forbid inlining, and mark
// branches as likely or unlikely to be taken.
#define NO_INLINE __attribute__((noinline))
#define ALWAYS_INLINE __attribute__((always_inline))
#define likely(x) __builtin_expect((x), 1)
#define unlikely(x) __builtin_expect((x), 0)
// Single-producer/single-consumer (SPSC) lock-free ring buffer holding up to
// N - 1 ints. Indices wrap with a bitmask, so N must be a power of two.
template <std::size_t N>
struct ringbuffer {
  static_assert(N >= 2 && (N & (N - 1)) == 0, "N must be a power of two");
  std::array<int, N> data_{};
  // Each index sits on its own cache line (alignas(64)) to avoid false
  // sharing, and each side keeps a cached copy of the other side's index so
  // it does not have to re-read the shared atomic on every call.
  // tail: the point at which the consumer finds the next item in the buffer.
  alignas(64) std::atomic<std::size_t> readIdx_{0};
  alignas(64) std::size_t writeIdxCached_{0};
  // head: the point at which the producer inserts items into the buffer.
  alignas(64) std::atomic<std::size_t> writeIdx_{0};
  alignas(64) std::size_t readIdxCached_{0};

  // Producer side: returns false when the buffer is full.
  ALWAYS_INLINE inline bool push(int val) noexcept {
    const auto writeIdx = writeIdx_.load(std::memory_order_relaxed);
    const auto nextWriteIdx = (writeIdx + 1) & (N - 1);
    if (unlikely(nextWriteIdx == readIdxCached_)) {
      // The cached read index says full; refresh it before giving up.
      readIdxCached_ = readIdx_.load(std::memory_order_acquire);
      if (nextWriteIdx == readIdxCached_) {
        return false;
      }
    }
    data_[writeIdx] = val;
    // Release: the consumer must see the stored value once it sees the new index.
    writeIdx_.store(nextWriteIdx, std::memory_order_release);
    return true;
  }

  // Consumer side: returns false when the buffer is empty.
  ALWAYS_INLINE inline bool pop(int &val) noexcept {
    const auto readIdx = readIdx_.load(std::memory_order_relaxed);
    if (unlikely(readIdx == writeIdxCached_)) {
      // The cached write index says empty; refresh it before giving up.
      writeIdxCached_ = writeIdx_.load(std::memory_order_acquire);
      if (readIdx == writeIdxCached_) {
        return false;
      }
    }
    val = data_[readIdx];
    const auto nextReadIdx = (readIdx + 1) & (N - 1);
    // Release: the producer must see the read complete before reusing the slot.
    readIdx_.store(nextReadIdx, std::memory_order_release);
    return true;
  }
};
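
// Usage sketch (an assumption layered on the gist, not part of it): drive the
// queue from one producer thread and one consumer thread, the only pairing
// this SPSC design supports. The function name spsc_demo, the buffer size,
// and the element count are illustrative choices.
int spsc_demo() {
  ringbuffer<1024> rb;
  constexpr int kCount = 100000;
  std::thread producer([&] {
    for (int i = 0; i < kCount; ++i) {
      while (!rb.push(i)) {
        // Buffer full: spin until the consumer frees a slot.
      }
    }
  });
  // Consume on this thread and check that elements arrive in FIFO order.
  int expected = 0;
  int v = 0;
  while (expected < kCount) {
    if (rb.pop(v)) {
      if (v != expected) {
        producer.join();
        return 1;
      }
      ++expected;
    }
  }
  producer.join();
  std::cout << "received " << expected << " items in order\n";
  return 0;
}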
int main() {
  // Run the usage sketch above; returns non-zero if FIFO order is violated.
  return spsc_demo();
}