path: root/common/alcomplex.cpp
author     Chris Robinson <[email protected]>  2022-11-24 08:01:37 -0800
committer  Chris Robinson <[email protected]>  2022-11-24 08:01:37 -0800
commit     cbcb14076016921ab90c386eba073d96c761c5c6 (patch)
tree       4a19cc8f7375af8a1fbdd570d2e8bf7571ecfd6a /common/alcomplex.cpp
parent     3b838bc781ca2a314fa51e08389037ebce34a9cc (diff)
Avoid returning an array for constexpr tables
Diffstat (limited to 'common/alcomplex.cpp')
-rw-r--r--  common/alcomplex.cpp | 73
1 file changed, 35 insertions, 38 deletions
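
The gist of the change: a constexpr function that built and returned a wrapper array (the removed our_array, needed because std::array has no constexpr non-const accessors in C++14) is replaced by a struct whose constexpr constructor fills a member array in place. The following is a minimal, illustrative sketch of that pattern only; SquareTable and gSquares are hypothetical names made up for this example and are not part of the OpenAL Soft sources.

#include <cstddef>

/* Sketch of the pattern adopted in this commit: the table is a struct whose
 * constexpr constructor writes a member C array directly, so each
 * instantiation is a single constant-initialized object rather than the
 * result of a constexpr function returning an array wrapper.
 */
template<std::size_t N>
struct SquareTable {
    unsigned mData[N]{};

    constexpr SquareTable()
    {
        /* Filling the member array in the constructor body sidesteps the lack
         * of constexpr non-const operator[] on std::array in C++14.
         */
        for(std::size_t i{0};i < N;++i)
            mData[i] = static_cast<unsigned>(i*i);
    }
};

/* Constant-initialized at compile time, analogous to the BitReverser<N>
 * tables in the diff below.
 */
constexpr SquareTable<8> gSquares{};
static_assert(gSquares.mData[3] == 9, "table built at compile time");

With this shape, each BitReverser<N> instance below is one constant-initialized object, and gBitReverses can refer to the member arrays through spans without any constexpr function having to return an array by value.
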
diff --git a/common/alcomplex.cpp b/common/alcomplex.cpp
index 28792928..eae47227 100644
--- a/common/alcomplex.cpp
+++ b/common/alcomplex.cpp
@@ -21,12 +21,6 @@ namespace {
using ushort = unsigned short;
using ushort2 = std::pair<ushort,ushort>;
-/* Because std::array doesn't have constexpr non-const accessors in C++14. */
-template<typename T, size_t N>
-struct our_array {
- T mData[N];
-};
-
constexpr size_t BitReverseCounter(size_t log2_size) noexcept
{
/* Some magic math that calculates the number of swaps needed for a
@@ -35,51 +29,54 @@ constexpr size_t BitReverseCounter(size_t log2_size) noexcept
return (1u<<(log2_size-1)) - (1u<<((log2_size-1u)/2u));
}
+
template<size_t N>
-constexpr auto GetBitReverser() noexcept
-{
+struct BitReverser {
static_assert(N <= sizeof(ushort)*8, "Too many bits for the bit-reversal table.");
- our_array<ushort2, BitReverseCounter(N)> ret{};
- const size_t fftsize{1u << N};
- size_t ret_i{0};
+ ushort2 mData[BitReverseCounter(N)]{};
- /* Bit-reversal permutation applied to a sequence of fftsize items. */
- for(size_t idx{1u};idx < fftsize-1;++idx)
+ constexpr BitReverser()
{
- size_t revidx{0u}, imask{idx};
- for(size_t i{0};i < N;++i)
- {
- revidx = (revidx<<1) | (imask&1);
- imask >>= 1;
- }
+ const size_t fftsize{1u << N};
+ size_t ret_i{0};
- if(idx < revidx)
+ /* Bit-reversal permutation applied to a sequence of fftsize items. */
+ for(size_t idx{1u};idx < fftsize-1;++idx)
{
- ret.mData[ret_i].first = static_cast<ushort>(idx);
- ret.mData[ret_i].second = static_cast<ushort>(revidx);
- ++ret_i;
+ size_t revidx{0u}, imask{idx};
+ for(size_t i{0};i < N;++i)
+ {
+ revidx = (revidx<<1) | (imask&1);
+ imask >>= 1;
+ }
+
+ if(idx < revidx)
+ {
+ mData[ret_i].first = static_cast<ushort>(idx);
+ mData[ret_i].second = static_cast<ushort>(revidx);
+ ++ret_i;
+ }
}
+ assert(ret_i == al::size(mData));
}
- assert(ret_i == al::size(ret.mData));
- return ret;
-}
+};
/* These bit-reversal swap tables support up to 10-bit indices (1024 elements),
* which is the largest used by OpenAL Soft's filters and effects. Larger FFT
* requests, used by some utilities where performance is less important, will
* use a slower table-less path.
*/
-constexpr auto BitReverser2 = GetBitReverser<2>();
-constexpr auto BitReverser3 = GetBitReverser<3>();
-constexpr auto BitReverser4 = GetBitReverser<4>();
-constexpr auto BitReverser5 = GetBitReverser<5>();
-constexpr auto BitReverser6 = GetBitReverser<6>();
-constexpr auto BitReverser7 = GetBitReverser<7>();
-constexpr auto BitReverser8 = GetBitReverser<8>();
-constexpr auto BitReverser9 = GetBitReverser<9>();
-constexpr auto BitReverser10 = GetBitReverser<10>();
-constexpr al::span<const ushort2> gBitReverses[11]{
+constexpr BitReverser<2> BitReverser2{};
+constexpr BitReverser<3> BitReverser3{};
+constexpr BitReverser<4> BitReverser4{};
+constexpr BitReverser<5> BitReverser5{};
+constexpr BitReverser<6> BitReverser6{};
+constexpr BitReverser<7> BitReverser7{};
+constexpr BitReverser<8> BitReverser8{};
+constexpr BitReverser<9> BitReverser9{};
+constexpr BitReverser<10> BitReverser10{};
+constexpr std::array<al::span<const ushort2>,11> gBitReverses{{
{}, {},
BitReverser2.mData,
BitReverser3.mData,
@@ -90,7 +87,7 @@ constexpr al::span<const ushort2> gBitReverses[11]{
BitReverser8.mData,
BitReverser9.mData,
BitReverser10.mData
-};
+}};
} // namespace
@@ -102,7 +99,7 @@ void complex_fft(const al::span<std::complex<double>> buffer, const double sign)
*/
const size_t log2_size{static_cast<size_t>(al::countr_zero(fftsize))};
- if(unlikely(log2_size >= al::size(gBitReverses)))
+ if(unlikely(log2_size >= gBitReverses.size()))
{
for(size_t idx{1u};idx < fftsize-1;++idx)
{