/*
 * NOTE(review): this chunk looks like a mangled extraction of the BODY of
 * dsps_bit_rev_lookup_fc32_ae32() (ESP-DSP, Xtensa "ae32" target).  The bare
 * numeric tokens fused onto lines below ("22", "31", "32", ...) appear to be
 * original source line numbers picked up by the extractor; they are not valid
 * C.  They are preserved untouched here — recover the pristine file rather
 * than hand-repairing this copy.  The function's opening brace, the local
 * declarations (data_i, idx_0..idx_3), the return statement and the closing
 * #endif are outside the visible span.
 *
 * What the visible asm does: in-place bit-reversal reordering of an
 * interleaved complex-float array using a precomputed table of 16-bit byte
 * offsets.  Each hardware-loop iteration reads four offsets from reverse_tab
 * and performs two element swaps: (idx_0 <-> idx_1) and (idx_2 <-> idx_3),
 * using the FPU indexed load/store instructions LSX/SSX on registers f0-f3.
 *
 * NOTE(review): the implementation splits one logical instruction sequence
 * across many separate `asm volatile` statements and relies on values
 * (address registers, f0-f3, the LOOPNEZ hardware-loop state) surviving
 * between them.  GCC gives no such guarantee between distinct asm blocks —
 * it may spill, reschedule, or insert code inside the hardware loop.  This
 * only works by accident at specific optimization levels; the robust form is
 * a single asm block or a .S file.  TODO confirm against upstream esp-dsp,
 * which ships this routine as standalone assembly.
 */
22#if (dsps_bit_rev_lookup_fc32_ae32_enabled == 1)
/* data_i = (char *)data + 4: pointer one float past data — presumably the
 * imaginary half of interleaved re/im pairs (8 bytes per complex element);
 * the same byte offset then indexes re via `data` and im via `data_i`. */
31 asm volatile (
"addi %0, %1, 4" :
"=a" (data_i) :
"a" (
data));
/* reverse_size >>= 1: each loop iteration consumes two table pairs
 * (four 16-bit entries), so the trip count is half the pair count. */
32 asm volatile (
"srli %0, %0, 1" :
"+a" (reverse_size));
/* LOOPNEZ: set up a zero-overhead hardware loop running reverse_size times,
 * ending at the label defined in the last asm statement below.  Skipped
 * entirely when reverse_size is 0. */
33 asm volatile (
"loopnez %0, __loop_end_radix2_reorder_lookup_table" ::
"a" (reverse_size));
/* Load four 16-bit byte offsets idx_0..idx_3 from reverse_tab[0..3]. */
34 asm volatile (
"l16ui %0, %1, 0" :
"=a" (idx_0) :
"a" (reverse_tab));
35 asm volatile (
"l16ui %0, %1, 2" :
"=a" (idx_1) :
"a" (reverse_tab));
36 asm volatile (
"l16ui %0, %1, 4" :
"=a" (idx_2) :
"a" (reverse_tab));
37 asm volatile (
"l16ui %0, %1, 6" :
"=a" (idx_3) :
"a" (reverse_tab));
/* Advance the table pointer past the four entries just read (8 bytes). */
38 asm volatile (
"addi %0, %0, 8" :
"+a" (reverse_tab));
/* First swap: load element idx_0 (re -> f0, im -> f2) and element idx_1
 * (re -> f1, im -> f3).  LSX = indexed float load: f = *(base + offset). */
40 asm volatile (
"lsx f0, %0, %1" ::
"a" (
data),
"a" (idx_0));
41 asm volatile (
"lsx f2, %0, %1" ::
"a" (data_i),
"a" (idx_0));
42 asm volatile (
"lsx f1, %0, %1" ::
"a" (
data),
"a" (idx_1));
43 asm volatile (
"lsx f3, %0, %1" ::
"a" (data_i),
"a" (idx_1));
/* Store them back crosswise: element idx_0's value goes to idx_1 and vice
 * versa — completing the swap.  SSX = indexed float store. */
45 asm volatile (
"ssx f0, %0, %1" ::
"a" (
data),
"a" (idx_1));
46 asm volatile (
"ssx f2, %0, %1" ::
"a" (data_i),
"a" (idx_1));
47 asm volatile (
"ssx f1, %0, %1" ::
"a" (
data),
"a" (idx_0));
48 asm volatile (
"ssx f3, %0, %1" ::
"a" (data_i),
"a" (idx_0));
/* Second swap, identical pattern for the pair (idx_2, idx_3). */
50 asm volatile (
"lsx f0, %0, %1" ::
"a" (
data),
"a" (idx_2));
51 asm volatile (
"lsx f2, %0, %1" ::
"a" (data_i),
"a" (idx_2));
52 asm volatile (
"lsx f1, %0, %1" ::
"a" (
data),
"a" (idx_3));
53 asm volatile (
"lsx f3, %0, %1" ::
"a" (data_i),
"a" (idx_3));
55 asm volatile (
"ssx f0, %0, %1" ::
"a" (
data),
"a" (idx_3));
56 asm volatile (
"ssx f2, %0, %1" ::
"a" (data_i),
"a" (idx_3));
57 asm volatile (
"ssx f1, %0, %1" ::
"a" (
data),
"a" (idx_2));
58 asm volatile (
"ssx f3, %0, %1" ::
"a" (data_i),
"a" (idx_2));
/* Hardware-loop end label.  The NOP carries the label; LOOPNEZ branches back
 * from here until the loop count is exhausted. */
60 asm volatile(
"__loop_end_radix2_reorder_lookup_table: nop");
/* NOTE(review): displaced/truncated fragments.  The prototype below matches
 * the asm body above (it presumably belongs BEFORE it in the pristine file);
 * the static array line is cut off mid-declaration (no ';' or initializer)
 * and looks like test/demo scaffolding (128 complex floats, interleaved
 * re/im) — TODO recover from the original source. */
esp_err_t dsps_bit_rev_lookup_fc32_ae32(float *data, int reverse_size, uint16_t *reverse_tab)
static float data[128 *2]