/** @file wls_radix4Fft.c
 *
 * @brief This file contains fixed-point radix-4 FFT function
 *
 * Copyright 2023-2024 NXP
 *
 * SPDX-License-Identifier: BSD-3-Clause
 *
 */
/************************************************************************
 * DFW fixed-point radix-4 FFT function
 ************************************************************************/
/* NOTE(review): the two bare '#include' directives below have lost their
 * operands (most likely '<...>' system-header names stripped during file
 * extraction) — the original header names must be restored for this
 * translation unit to build. TODO confirm against the upstream source. */
#include
#if CONFIG_WLS_CSI_PROC
#include
#include "wls_param_defines.h"
#include "wls_radix4Fft.h"
//#include "math.h"
//#define ARM_DEBUG
#if defined(MAX_FFT_SIZE_2048)
/*
 * Twiddle-factor lookup table for the radix-4 FFT, MAX_FFT_SIZE = 2048.
 *
 * Layout: 2048 interleaved (cos, sin) pairs in Q15 fixed point
 * (0x7FFF ~= +1.0, 0x8000 = -1.0), one pair per table index k,
 * sampling one full revolution of the unit circle in steps of
 * 2*pi/2048. This is visible in the data itself: pair 0 is
 * (0x7FFF, 0x0000) = (1, 0); the 45-degree point (0x5A82, 0x5A82)
 * appears at k = 256; the cosine crosses zero near the quarter-table
 * point and reaches 0x8000 (-1) at the half-table point, after which
 * the sine entries turn negative.
 *
 * NOTE(review): whether the FFT kernel treats the second element as
 * +sin or -sin (i.e. e^{+j*theta} vs e^{-j*theta}) is decided by the
 * butterfly code, which is not visible here — confirm in the kernel
 * before reusing this table elsewhere.
 */
const INT16 radix4FftTwiddleArr[2 * MAX_FFT_SIZE] = {
0x7FFF, 0x0000, 0x7FFF, 0x0065, 0x7FFF, 0x00C9, 0x7FFF, 0x012E, 0x7FFE, 0x0192, 0x7FFC, 0x01F7, 0x7FFA, 0x025B, 0x7FF8, 0x02C0, 0x7FF6, 0x0324, 0x7FF4, 0x0389, 0x7FF1, 0x03ED, 0x7FED, 0x0452, 0x7FEA, 0x04B6, 0x7FE6, 0x051B, 0x7FE2, 0x057F, 0x7FDD, 0x05E3, 0x7FD9, 0x0648, 0x7FD3, 0x06AC, 0x7FCE, 0x0711, 0x7FC8, 0x0775, 0x7FC2, 0x07D9, 0x7FBC, 0x083E, 0x7FB5, 0x08A2, 0x7FAE, 0x0906, 0x7FA7, 0x096B, 0x7FA0, 0x09CF, 0x7F98, 0x0A33, 0x7F90, 0x0A97, 0x7F87, 0x0AFB, 0x7F7E, 0x0B60, 0x7F75, 0x0BC4, 0x7F6C, 0x0C28, 0x7F62, 0x0C8C, 0x7F58, 0x0CF0, 0x7F4E, 0x0D54, 0x7F43, 0x0DB8, 0x7F38, 0x0E1C, 0x7F2D, 0x0E80, 0x7F22, 0x0EE4, 0x7F16, 0x0F47, 0x7F0A, 0x0FAB, 0x7EFD, 0x100F, 0x7EF0, 0x1073, 0x7EE3, 0x10D6, 0x7ED6, 0x113A, 0x7EC8, 0x119E, 0x7EBA, 0x1201, 0x7EAC, 0x1265, 0x7E9D, 0x12C8, 0x7E8E, 0x132B, 0x7E7F, 0x138F, 0x7E70, 0x13F2, 0x7E60, 0x1455, 0x7E50, 0x14B9, 0x7E3F, 0x151C, 0x7E2F, 0x157F, 0x7E1E, 0x15E2, 0x7E0C, 0x1645, 0x7DFB, 0x16A8, 0x7DE9, 0x170B, 0x7DD6, 0x176E, 0x7DC4, 0x17D1, 0x7DB1, 0x1833, 0x7D9E, 0x1896, 0x7D8A, 0x18F9, 0x7D77, 0x195B, 0x7D63, 0x19BE, 0x7D4E, 0x1A20, 0x7D3A, 0x1A83, 0x7D25, 0x1AE5, 0x7D0F, 0x1B47, 0x7CFA, 0x1BA9, 0x7CE4, 0x1C0C, 0x7CCE, 0x1C6E, 0x7CB7, 0x1CD0, 0x7CA0, 0x1D31, 0x7C89, 0x1D93, 0x7C72, 0x1DF5, 0x7C5A, 0x1E57, 0x7C42, 0x1EB8, 0x7C2A, 0x1F1A, 0x7C11, 0x1F7B, 0x7BF9, 0x1FDD, 0x7BDF, 0x203E, 0x7BC6, 0x209F, 0x7BAC, 0x2101, 0x7B92, 0x2162, 0x7B78, 0x21C3, 0x7B5D, 0x2224, 
0x7B42, 0x2284, 0x7B27, 0x22E5, 0x7B0B, 0x2346, 0x7AEF, 0x23A7, 0x7AD3, 0x2407, 0x7AB7, 0x2467, 0x7A9A, 0x24C8, 0x7A7D, 0x2528, 0x7A60, 0x2588, 0x7A42, 0x25E8, 0x7A24, 0x2648, 0x7A06, 0x26A8, 0x79E7, 0x2708, 0x79C9, 0x2768, 0x79AA, 0x27C7, 0x798A, 0x2827, 0x796A, 0x2886, 0x794A, 0x28E5, 0x792A, 0x2945, 0x790A, 0x29A4, 0x78E9, 0x2A03, 0x78C8, 0x2A62, 0x78A6, 0x2AC1, 0x7885, 0x2B1F, 0x7863, 0x2B7E, 0x7840, 0x2BDC, 0x781E, 0x2C3B, 0x77FB, 0x2C99, 0x77D8, 0x2CF7, 0x77B4, 0x2D55, 0x7790, 0x2DB3, 0x776C, 0x2E11, 0x7748, 0x2E6F, 0x7723, 0x2ECC, 0x76FE, 0x2F2A, 0x76D9, 0x2F87, 0x76B4, 0x2FE5, 0x768E, 0x3042, 0x7668, 0x309F, 0x7642, 0x30FC, 0x761B, 0x3159, 0x75F4, 0x31B5, 0x75CD, 0x3212, 0x75A6, 0x326E, 0x757E, 0x32CB, 0x7556, 0x3327, 0x752D, 0x3383, 0x7505, 0x33DF, 0x74DC, 0x343B, 0x74B3, 0x3497, 0x7489, 0x34F2, 0x7460, 0x354E, 0x7436, 0x35A9, 0x740B, 0x3604, 0x73E1, 0x365F, 0x73B6, 0x36BA, 0x738B, 0x3715, 0x735F, 0x3770, 0x7334, 0x37CA, 0x7308, 0x3825, 0x72DC, 0x387F, 0x72AF, 0x38D9, 0x7282, 0x3933, 0x7255, 0x398D, 0x7228, 0x39E7, 0x71FA, 0x3A40, 0x71CC, 0x3A9A, 0x719E, 0x3AF3, 0x7170, 0x3B4C, 0x7141, 0x3BA5, 0x7112, 0x3BFE, 0x70E3, 0x3C57, 0x70B3, 0x3CAF, 0x7083, 0x3D08, 0x7053, 0x3D60, 0x7023, 0x3DB8, 0x6FF2, 0x3E10, 0x6FC2, 0x3E68, 0x6F90, 0x3EC0, 0x6F5F, 0x3F17, 0x6F2D, 0x3F6F, 0x6EFB, 0x3FC6, 0x6EC9, 0x401D, 0x6E97, 0x4074, 0x6E64, 0x40CB, 0x6E31, 0x4121, 0x6DFE, 0x4178, 0x6DCA, 0x41CE, 0x6D96, 0x4224, 0x6D62, 0x427A, 0x6D2E, 0x42D0, 0x6CF9, 0x4326, 0x6CC4, 0x437B, 0x6C8F, 0x43D1, 0x6C5A, 0x4426, 0x6C24, 0x447B, 0x6BEE, 0x44D0, 0x6BB8, 0x4524, 0x6B82, 0x4579, 0x6B4B, 0x45CD, 0x6B14, 0x4621, 0x6ADD, 0x4675, 0x6AA5, 0x46C9, 0x6A6E, 0x471D, 0x6A36, 0x4770, 0x69FD, 0x47C4, 0x69C5, 0x4817, 0x698C, 0x486A, 0x6953, 0x48BD, 0x691A, 0x490F, 0x68E0, 0x4962, 0x68A7, 0x49B4, 0x686D, 0x4A06, 0x6832, 0x4A58, 0x67F8, 0x4AAA, 0x67BD, 0x4AFB, 0x6782, 0x4B4D, 0x6747, 0x4B9E, 0x670B, 0x4BEF, 0x66D0, 0x4C40, 0x6693, 0x4C91, 0x6657, 0x4CE1, 0x661B, 0x4D31, 0x65DE, 0x4D81, 0x65A1, 0x4DD1, 
0x6564, 0x4E21, 0x6526, 0x4E71, 0x64E9, 0x4EC0, 0x64AB, 0x4F0F, 0x646C, 0x4F5E, 0x642E, 0x4FAD, 0x63EF, 0x4FFB, 0x63B0, 0x504A, 0x6371, 0x5098, 0x6332, 0x50E6, 0x62F2, 0x5134, 0x62B2, 0x5181, 0x6272, 0x51CF, 0x6232, 0x521C, 0x61F1, 0x5269, 0x61B0, 0x52B6, 0x616F, 0x5303, 0x612E, 0x534F, 0x60EC, 0x539B, 0x60AA, 0x53E7, 0x6068, 0x5433, 0x6026, 0x547F, 0x5FE4, 0x54CA, 0x5FA1, 0x5515, 0x5F5E, 0x5560, 0x5F1B, 0x55AB, 0x5ED7, 0x55F6, 0x5E94, 0x5640, 0x5E50, 0x568A, 0x5E0C, 0x56D4, 0x5DC8, 0x571E, 0x5D83, 0x5767, 0x5D3E, 0x57B1, 0x5CF9, 0x57FA, 0x5CB4, 0x5843, 0x5C6F, 0x588C, 0x5C29, 0x58D4, 0x5BE3, 0x591C, 0x5B9D, 0x5964, 0x5B57, 0x59AC, 0x5B10, 0x59F4, 0x5AC9, 0x5A3B, 0x5A82, 0x5A82, 0x5A3B, 0x5AC9, 0x59F4, 0x5B10, 0x59AC, 0x5B57, 0x5964, 0x5B9D, 0x591C, 0x5BE3, 0x58D4, 0x5C29, 0x588C, 0x5C6F, 0x5843, 0x5CB4, 0x57FA, 0x5CF9, 0x57B1, 0x5D3E, 0x5767, 0x5D83, 0x571E, 0x5DC8, 0x56D4, 0x5E0C, 0x568A, 0x5E50, 0x5640, 0x5E94, 0x55F6, 0x5ED7, 0x55AB, 0x5F1B, 0x5560, 0x5F5E, 0x5515, 0x5FA1, 0x54CA, 0x5FE4, 0x547F, 0x6026, 0x5433, 0x6068, 0x53E7, 0x60AA, 0x539B, 0x60EC, 0x534F, 0x612E, 0x5303, 0x616F, 0x52B6, 0x61B0, 0x5269, 0x61F1, 0x521C, 0x6232, 0x51CF, 0x6272, 0x5181, 0x62B2, 0x5134, 0x62F2, 0x50E6, 0x6332, 0x5098, 0x6371, 0x504A, 0x63B0, 0x4FFB, 0x63EF, 0x4FAD, 0x642E, 0x4F5E, 0x646C, 0x4F0F, 0x64AB, 0x4EC0, 0x64E9, 0x4E71, 0x6526, 0x4E21, 0x6564, 0x4DD1, 0x65A1, 0x4D81, 0x65DE, 0x4D31, 0x661B, 0x4CE1, 0x6657, 0x4C91, 0x6693, 0x4C40, 0x66D0, 0x4BEF, 0x670B, 0x4B9E, 0x6747, 0x4B4D, 0x6782, 0x4AFB, 0x67BD, 0x4AAA, 0x67F8, 0x4A58, 0x6832, 0x4A06, 0x686D, 0x49B4, 0x68A7, 0x4962, 0x68E0, 0x490F, 0x691A, 0x48BD, 0x6953, 0x486A, 0x698C, 0x4817, 0x69C5, 0x47C4, 0x69FD, 0x4770, 0x6A36, 0x471D, 0x6A6E, 0x46C9, 0x6AA5, 0x4675, 0x6ADD, 0x4621, 0x6B14, 0x45CD, 0x6B4B, 0x4579, 0x6B82, 0x4524, 0x6BB8, 0x44D0, 0x6BEE, 0x447B, 0x6C24, 0x4426, 0x6C5A, 0x43D1, 0x6C8F, 0x437B, 0x6CC4, 0x4326, 0x6CF9, 0x42D0, 0x6D2E, 0x427A, 0x6D62, 0x4224, 0x6D96, 0x41CE, 0x6DCA, 0x4178, 0x6DFE, 0x4121, 0x6E31, 
0x40CB, 0x6E64, 0x4074, 0x6E97, 0x401D, 0x6EC9, 0x3FC6, 0x6EFB, 0x3F6F, 0x6F2D, 0x3F17, 0x6F5F, 0x3EC0, 0x6F90, 0x3E68, 0x6FC2, 0x3E10, 0x6FF2, 0x3DB8, 0x7023, 0x3D60, 0x7053, 0x3D08, 0x7083, 0x3CAF, 0x70B3, 0x3C57, 0x70E3, 0x3BFE, 0x7112, 0x3BA5, 0x7141, 0x3B4C, 0x7170, 0x3AF3, 0x719E, 0x3A9A, 0x71CC, 0x3A40, 0x71FA, 0x39E7, 0x7228, 0x398D, 0x7255, 0x3933, 0x7282, 0x38D9, 0x72AF, 0x387F, 0x72DC, 0x3825, 0x7308, 0x37CA, 0x7334, 0x3770, 0x735F, 0x3715, 0x738B, 0x36BA, 0x73B6, 0x365F, 0x73E1, 0x3604, 0x740B, 0x35A9, 0x7436, 0x354E, 0x7460, 0x34F2, 0x7489, 0x3497, 0x74B3, 0x343B, 0x74DC, 0x33DF, 0x7505, 0x3383, 0x752D, 0x3327, 0x7556, 0x32CB, 0x757E, 0x326E, 0x75A6, 0x3212, 0x75CD, 0x31B5, 0x75F4, 0x3159, 0x761B, 0x30FC, 0x7642, 0x309F, 0x7668, 0x3042, 0x768E, 0x2FE5, 0x76B4, 0x2F87, 0x76D9, 0x2F2A, 0x76FE, 0x2ECC, 0x7723, 0x2E6F, 0x7748, 0x2E11, 0x776C, 0x2DB3, 0x7790, 0x2D55, 0x77B4, 0x2CF7, 0x77D8, 0x2C99, 0x77FB, 0x2C3B, 0x781E, 0x2BDC, 0x7840, 0x2B7E, 0x7863, 0x2B1F, 0x7885, 0x2AC1, 0x78A6, 0x2A62, 0x78C8, 0x2A03, 0x78E9, 0x29A4, 0x790A, 0x2945, 0x792A, 0x28E5, 0x794A, 0x2886, 0x796A, 0x2827, 0x798A, 0x27C7, 0x79AA, 0x2768, 0x79C9, 0x2708, 0x79E7, 0x26A8, 0x7A06, 0x2648, 0x7A24, 0x25E8, 0x7A42, 0x2588, 0x7A60, 0x2528, 0x7A7D, 0x24C8, 0x7A9A, 0x2467, 0x7AB7, 0x2407, 0x7AD3, 0x23A7, 0x7AEF, 0x2346, 0x7B0B, 0x22E5, 0x7B27, 0x2284, 0x7B42, 0x2224, 0x7B5D, 0x21C3, 0x7B78, 0x2162, 0x7B92, 0x2101, 0x7BAC, 0x209F, 0x7BC6, 0x203E, 0x7BDF, 0x1FDD, 0x7BF9, 0x1F7B, 0x7C11, 0x1F1A, 0x7C2A, 0x1EB8, 0x7C42, 0x1E57, 0x7C5A, 0x1DF5, 0x7C72, 0x1D93, 0x7C89, 0x1D31, 0x7CA0, 0x1CD0, 0x7CB7, 0x1C6E, 0x7CCE, 0x1C0C, 0x7CE4, 0x1BA9, 0x7CFA, 0x1B47, 0x7D0F, 0x1AE5, 0x7D25, 0x1A83, 0x7D3A, 0x1A20, 0x7D4E, 0x19BE, 0x7D63, 0x195B, 0x7D77, 0x18F9, 0x7D8A, 0x1896, 0x7D9E, 0x1833, 0x7DB1, 0x17D1, 0x7DC4, 0x176E, 0x7DD6, 0x170B, 0x7DE9, 0x16A8, 0x7DFB, 0x1645, 0x7E0C, 0x15E2, 0x7E1E, 0x157F, 0x7E2F, 0x151C, 0x7E3F, 0x14B9, 0x7E50, 0x1455, 0x7E60, 0x13F2, 0x7E70, 0x138F, 0x7E7F, 0x132B, 0x7E8E, 
0x12C8, 0x7E9D, 0x1265, 0x7EAC, 0x1201, 0x7EBA, 0x119E, 0x7EC8, 0x113A, 0x7ED6, 0x10D6, 0x7EE3, 0x1073, 0x7EF0, 0x100F, 0x7EFD, 0x0FAB, 0x7F0A, 0x0F47, 0x7F16, 0x0EE4, 0x7F22, 0x0E80, 0x7F2D, 0x0E1C, 0x7F38, 0x0DB8, 0x7F43, 0x0D54, 0x7F4E, 0x0CF0, 0x7F58, 0x0C8C, 0x7F62, 0x0C28, 0x7F6C, 0x0BC4, 0x7F75, 0x0B60, 0x7F7E, 0x0AFB, 0x7F87, 0x0A97, 0x7F90, 0x0A33, 0x7F98, 0x09CF, 0x7FA0, 0x096B, 0x7FA7, 0x0906, 0x7FAE, 0x08A2, 0x7FB5, 0x083E, 0x7FBC, 0x07D9, 0x7FC2, 0x0775, 0x7FC8, 0x0711, 0x7FCE, 0x06AC, 0x7FD3, 0x0648, 0x7FD9, 0x05E3, 0x7FDD, 0x057F, 0x7FE2, 0x051B, 0x7FE6, 0x04B6, 0x7FEA, 0x0452, 0x7FED, 0x03ED, 0x7FF1, 0x0389, 0x7FF4, 0x0324, 0x7FF6, 0x02C0, 0x7FF8, 0x025B, 0x7FFA, 0x01F7, 0x7FFC, 0x0192, 0x7FFE, 0x012E, 0x7FFF, 0x00C9, 0x7FFF, 0x0065, 0x7FFF, 0x0000, 0x7FFF, 0xFF9B, 0x7FFF, 0xFF37, 0x7FFF, 0xFED2, 0x7FFF, 0xFE6E, 0x7FFE, 0xFE09, 0x7FFC, 0xFDA5, 0x7FFA, 0xFD40, 0x7FF8, 0xFCDC, 0x7FF6, 0xFC77, 0x7FF4, 0xFC13, 0x7FF1, 0xFBAE, 0x7FED, 0xFB4A, 0x7FEA, 0xFAE5, 0x7FE6, 0xFA81, 0x7FE2, 0xFA1D, 0x7FDD, 0xF9B8, 0x7FD9, 0xF954, 0x7FD3, 0xF8EF, 0x7FCE, 0xF88B, 0x7FC8, 0xF827, 0x7FC2, 0xF7C2, 0x7FBC, 0xF75E, 0x7FB5, 0xF6FA, 0x7FAE, 0xF695, 0x7FA7, 0xF631, 0x7FA0, 0xF5CD, 0x7F98, 0xF569, 0x7F90, 0xF505, 0x7F87, 0xF4A0, 0x7F7E, 0xF43C, 0x7F75, 0xF3D8, 0x7F6C, 0xF374, 0x7F62, 0xF310, 0x7F58, 0xF2AC, 0x7F4E, 0xF248, 0x7F43, 0xF1E4, 0x7F38, 0xF180, 0x7F2D, 0xF11C, 0x7F22, 0xF0B9, 0x7F16, 0xF055, 0x7F0A, 0xEFF1, 0x7EFD, 0xEF8D, 0x7EF0, 0xEF2A, 0x7EE3, 0xEEC6, 0x7ED6, 0xEE62, 0x7EC8, 0xEDFF, 0x7EBA, 0xED9B, 0x7EAC, 0xED38, 0x7E9D, 0xECD5, 0x7E8E, 0xEC71, 0x7E7F, 0xEC0E, 0x7E70, 0xEBAB, 0x7E60, 0xEB47, 0x7E50, 0xEAE4, 0x7E3F, 0xEA81, 0x7E2F, 0xEA1E, 0x7E1E, 0xE9BB, 0x7E0C, 0xE958, 0x7DFB, 0xE8F5, 0x7DE9, 0xE892, 0x7DD6, 0xE82F, 0x7DC4, 0xE7CD, 0x7DB1, 0xE76A, 0x7D9E, 0xE707, 0x7D8A, 0xE6A5, 0x7D77, 0xE642, 0x7D63, 0xE5E0, 0x7D4E, 0xE57D, 0x7D3A, 0xE51B, 0x7D25, 0xE4B9, 0x7D0F, 0xE457, 0x7CFA, 0xE3F4, 0x7CE4, 0xE392, 0x7CCE, 0xE330, 0x7CB7, 0xE2CF, 0x7CA0, 0xE26D, 0x7C89, 
0xE20B, 0x7C72, 0xE1A9, 0x7C5A, 0xE148, 0x7C42, 0xE0E6, 0x7C2A, 0xE085, 0x7C11, 0xE023, 0x7BF9, 0xDFC2, 0x7BDF, 0xDF61, 0x7BC6, 0xDEFF, 0x7BAC, 0xDE9E, 0x7B92, 0xDE3D, 0x7B78, 0xDDDC, 0x7B5D, 0xDD7C, 0x7B42, 0xDD1B, 0x7B27, 0xDCBA, 0x7B0B, 0xDC59, 0x7AEF, 0xDBF9, 0x7AD3, 0xDB99, 0x7AB7, 0xDB38, 0x7A9A, 0xDAD8, 0x7A7D, 0xDA78, 0x7A60, 0xDA18, 0x7A42, 0xD9B8, 0x7A24, 0xD958, 0x7A06, 0xD8F8, 0x79E7, 0xD898, 0x79C9, 0xD839, 0x79AA, 0xD7D9, 0x798A, 0xD77A, 0x796A, 0xD71B, 0x794A, 0xD6BB, 0x792A, 0xD65C, 0x790A, 0xD5FD, 0x78E9, 0xD59E, 0x78C8, 0xD53F, 0x78A6, 0xD4E1, 0x7885, 0xD482, 0x7863, 0xD424, 0x7840, 0xD3C5, 0x781E, 0xD367, 0x77FB, 0xD309, 0x77D8, 0xD2AB, 0x77B4, 0xD24D, 0x7790, 0xD1EF, 0x776C, 0xD191, 0x7748, 0xD134, 0x7723, 0xD0D6, 0x76FE, 0xD079, 0x76D9, 0xD01B, 0x76B4, 0xCFBE, 0x768E, 0xCF61, 0x7668, 0xCF04, 0x7642, 0xCEA7, 0x761B, 0xCE4B, 0x75F4, 0xCDEE, 0x75CD, 0xCD92, 0x75A6, 0xCD35, 0x757E, 0xCCD9, 0x7556, 0xCC7D, 0x752D, 0xCC21, 0x7505, 0xCBC5, 0x74DC, 0xCB69, 0x74B3, 0xCB0E, 0x7489, 0xCAB2, 0x7460, 0xCA57, 0x7436, 0xC9FC, 0x740B, 0xC9A1, 0x73E1, 0xC946, 0x73B6, 0xC8EB, 0x738B, 0xC890, 0x735F, 0xC836, 0x7334, 0xC7DB, 0x7308, 0xC781, 0x72DC, 0xC727, 0x72AF, 0xC6CD, 0x7282, 0xC673, 0x7255, 0xC619, 0x7228, 0xC5C0, 0x71FA, 0xC566, 0x71CC, 0xC50D, 0x719E, 0xC4B4, 0x7170, 0xC45B, 0x7141, 0xC402, 0x7112, 0xC3A9, 0x70E3, 0xC351, 0x70B3, 0xC2F8, 0x7083, 0xC2A0, 0x7053, 0xC248, 0x7023, 0xC1F0, 0x6FF2, 0xC198, 0x6FC2, 0xC140, 0x6F90, 0xC0E9, 0x6F5F, 0xC091, 0x6F2D, 0xC03A, 0x6EFB, 0xBFE3, 0x6EC9, 0xBF8C, 0x6E97, 0xBF35, 0x6E64, 0xBEDF, 0x6E31, 0xBE88, 0x6DFE, 0xBE32, 0x6DCA, 0xBDDC, 0x6D96, 0xBD86, 0x6D62, 0xBD30, 0x6D2E, 0xBCDA, 0x6CF9, 0xBC85, 0x6CC4, 0xBC2F, 0x6C8F, 0xBBDA, 0x6C5A, 0xBB85, 0x6C24, 0xBB30, 0x6BEE, 0xBADC, 0x6BB8, 0xBA87, 0x6B82, 0xBA33, 0x6B4B, 0xB9DF, 0x6B14, 0xB98B, 0x6ADD, 0xB937, 0x6AA5, 0xB8E3, 0x6A6E, 0xB890, 0x6A36, 0xB83C, 0x69FD, 0xB7E9, 0x69C5, 0xB796, 0x698C, 0xB743, 0x6953, 0xB6F1, 0x691A, 0xB69E, 0x68E0, 0xB64C, 0x68A7, 0xB5FA, 0x686D, 
0xB5A8, 0x6832, 0xB556, 0x67F8, 0xB505, 0x67BD, 0xB4B3, 0x6782, 0xB462, 0x6747, 0xB411, 0x670B, 0xB3C0, 0x66D0, 0xB36F, 0x6693, 0xB31F, 0x6657, 0xB2CF, 0x661B, 0xB27F, 0x65DE, 0xB22F, 0x65A1, 0xB1DF, 0x6564, 0xB18F, 0x6526, 0xB140, 0x64E9, 0xB0F1, 0x64AB, 0xB0A2, 0x646C, 0xB053, 0x642E, 0xB005, 0x63EF, 0xAFB6, 0x63B0, 0xAF68, 0x6371, 0xAF1A, 0x6332, 0xAECC, 0x62F2, 0xAE7F, 0x62B2, 0xAE31, 0x6272, 0xADE4, 0x6232, 0xAD97, 0x61F1, 0xAD4A, 0x61B0, 0xACFD, 0x616F, 0xACB1, 0x612E, 0xAC65, 0x60EC, 0xAC19, 0x60AA, 0xABCD, 0x6068, 0xAB81, 0x6026, 0xAB36, 0x5FE4, 0xAAEB, 0x5FA1, 0xAAA0, 0x5F5E, 0xAA55, 0x5F1B, 0xAA0A, 0x5ED7, 0xA9C0, 0x5E94, 0xA976, 0x5E50, 0xA92C, 0x5E0C, 0xA8E2, 0x5DC8, 0xA899, 0x5D83, 0xA84F, 0x5D3E, 0xA806, 0x5CF9, 0xA7BD, 0x5CB4, 0xA774, 0x5C6F, 0xA72C, 0x5C29, 0xA6E4, 0x5BE3, 0xA69C, 0x5B9D, 0xA654, 0x5B57, 0xA60C, 0x5B10, 0xA5C5, 0x5AC9, 0xA57E, 0x5A82, 0xA537, 0x5A3B, 0xA4F0, 0x59F4, 0xA4A9, 0x59AC, 0xA463, 0x5964, 0xA41D, 0x591C, 0xA3D7, 0x58D4, 0xA391, 0x588C, 0xA34C, 0x5843, 0xA307, 0x57FA, 0xA2C2, 0x57B1, 0xA27D, 0x5767, 0xA238, 0x571E, 0xA1F4, 0x56D4, 0xA1B0, 0x568A, 0xA16C, 0x5640, 0xA129, 0x55F6, 0xA0E5, 0x55AB, 0xA0A2, 0x5560, 0xA05F, 0x5515, 0xA01C, 0x54CA, 0x9FDA, 0x547F, 0x9F98, 0x5433, 0x9F56, 0x53E7, 0x9F14, 0x539B, 0x9ED2, 0x534F, 0x9E91, 0x5303, 0x9E50, 0x52B6, 0x9E0F, 0x5269, 0x9DCE, 0x521C, 0x9D8E, 0x51CF, 0x9D4E, 0x5181, 0x9D0E, 0x5134, 0x9CCE, 0x50E6, 0x9C8F, 0x5098, 0x9C50, 0x504A, 0x9C11, 0x4FFB, 0x9BD2, 0x4FAD, 0x9B94, 0x4F5E, 0x9B55, 0x4F0F, 0x9B17, 0x4EC0, 0x9ADA, 0x4E71, 0x9A9C, 0x4E21, 0x9A5F, 0x4DD1, 0x9A22, 0x4D81, 0x99E5, 0x4D31, 0x99A9, 0x4CE1, 0x996D, 0x4C91, 0x9930, 0x4C40, 0x98F5, 0x4BEF, 0x98B9, 0x4B9E, 0x987E, 0x4B4D, 0x9843, 0x4AFB, 0x9808, 0x4AAA, 0x97CE, 0x4A58, 0x9793, 0x4A06, 0x9759, 0x49B4, 0x9720, 0x4962, 0x96E6, 0x490F, 0x96AD, 0x48BD, 0x9674, 0x486A, 0x963B, 0x4817, 0x9603, 0x47C4, 0x95CA, 0x4770, 0x9592, 0x471D, 0x955B, 0x46C9, 0x9523, 0x4675, 0x94EC, 0x4621, 0x94B5, 0x45CD, 0x947E, 0x4579, 0x9448, 0x4524, 
0x9412, 0x44D0, 0x93DC, 0x447B, 0x93A6, 0x4426, 0x9371, 0x43D1, 0x933C, 0x437B, 0x9307, 0x4326, 0x92D2, 0x42D0, 0x929E, 0x427A, 0x926A, 0x4224, 0x9236, 0x41CE, 0x9202, 0x4178, 0x91CF, 0x4121, 0x919C, 0x40CB, 0x9169, 0x4074, 0x9137, 0x401D, 0x9105, 0x3FC6, 0x90D3, 0x3F6F, 0x90A1, 0x3F17, 0x9070, 0x3EC0, 0x903E, 0x3E68, 0x900E, 0x3E10, 0x8FDD, 0x3DB8, 0x8FAD, 0x3D60, 0x8F7D, 0x3D08, 0x8F4D, 0x3CAF, 0x8F1D, 0x3C57, 0x8EEE, 0x3BFE, 0x8EBF, 0x3BA5, 0x8E90, 0x3B4C, 0x8E62, 0x3AF3, 0x8E34, 0x3A9A, 0x8E06, 0x3A40, 0x8DD8, 0x39E7, 0x8DAB, 0x398D, 0x8D7E, 0x3933, 0x8D51, 0x38D9, 0x8D24, 0x387F, 0x8CF8, 0x3825, 0x8CCC, 0x37CA, 0x8CA1, 0x3770, 0x8C75, 0x3715, 0x8C4A, 0x36BA, 0x8C1F, 0x365F, 0x8BF5, 0x3604, 0x8BCA, 0x35A9, 0x8BA0, 0x354E, 0x8B77, 0x34F2, 0x8B4D, 0x3497, 0x8B24, 0x343B, 0x8AFB, 0x33DF, 0x8AD3, 0x3383, 0x8AAA, 0x3327, 0x8A82, 0x32CB, 0x8A5A, 0x326E, 0x8A33, 0x3212, 0x8A0C, 0x31B5, 0x89E5, 0x3159, 0x89BE, 0x30FC, 0x8998, 0x309F, 0x8972, 0x3042, 0x894C, 0x2FE5, 0x8927, 0x2F87, 0x8902, 0x2F2A, 0x88DD, 0x2ECC, 0x88B8, 0x2E6F, 0x8894, 0x2E11, 0x8870, 0x2DB3, 0x884C, 0x2D55, 0x8828, 0x2CF7, 0x8805, 0x2C99, 0x87E2, 0x2C3B, 0x87C0, 0x2BDC, 0x879D, 0x2B7E, 0x877B, 0x2B1F, 0x875A, 0x2AC1, 0x8738, 0x2A62, 0x8717, 0x2A03, 0x86F6, 0x29A4, 0x86D6, 0x2945, 0x86B6, 0x28E5, 0x8696, 0x2886, 0x8676, 0x2827, 0x8656, 0x27C7, 0x8637, 0x2768, 0x8619, 0x2708, 0x85FA, 0x26A8, 0x85DC, 0x2648, 0x85BE, 0x25E8, 0x85A0, 0x2588, 0x8583, 0x2528, 0x8566, 0x24C8, 0x8549, 0x2467, 0x852D, 0x2407, 0x8511, 0x23A7, 0x84F5, 0x2346, 0x84D9, 0x22E5, 0x84BE, 0x2284, 0x84A3, 0x2224, 0x8488, 0x21C3, 0x846E, 0x2162, 0x8454, 0x2101, 0x843A, 0x209F, 0x8421, 0x203E, 0x8407, 0x1FDD, 0x83EF, 0x1F7B, 0x83D6, 0x1F1A, 0x83BE, 0x1EB8, 0x83A6, 0x1E57, 0x838E, 0x1DF5, 0x8377, 0x1D93, 0x8360, 0x1D31, 0x8349, 0x1CD0, 0x8332, 0x1C6E, 0x831C, 0x1C0C, 0x8306, 0x1BA9, 0x82F1, 0x1B47, 0x82DB, 0x1AE5, 0x82C6, 0x1A83, 0x82B2, 0x1A20, 0x829D, 0x19BE, 0x8289, 0x195B, 0x8276, 0x18F9, 0x8262, 0x1896, 0x824F, 0x1833, 0x823C, 0x17D1, 
0x822A, 0x176E, 0x8217, 0x170B, 0x8205, 0x16A8, 0x81F4, 0x1645, 0x81E2, 0x15E2, 0x81D1, 0x157F, 0x81C1, 0x151C, 0x81B0, 0x14B9, 0x81A0, 0x1455, 0x8190, 0x13F2, 0x8181, 0x138F, 0x8172, 0x132B, 0x8163, 0x12C8, 0x8154, 0x1265, 0x8146, 0x1201, 0x8138, 0x119E, 0x812A, 0x113A, 0x811D, 0x10D6, 0x8110, 0x1073, 0x8103, 0x100F, 0x80F6, 0x0FAB, 0x80EA, 0x0F47, 0x80DE, 0x0EE4, 0x80D3, 0x0E80, 0x80C8, 0x0E1C, 0x80BD, 0x0DB8, 0x80B2, 0x0D54, 0x80A8, 0x0CF0, 0x809E, 0x0C8C, 0x8094, 0x0C28, 0x808B, 0x0BC4, 0x8082, 0x0B60, 0x8079, 0x0AFB, 0x8070, 0x0A97, 0x8068, 0x0A33, 0x8060, 0x09CF, 0x8059, 0x096B, 0x8052, 0x0906, 0x804B, 0x08A2, 0x8044, 0x083E, 0x803E, 0x07D9, 0x8038, 0x0775, 0x8032, 0x0711, 0x802D, 0x06AC, 0x8027, 0x0648, 0x8023, 0x05E3, 0x801E, 0x057F, 0x801A, 0x051B, 0x8016, 0x04B6, 0x8013, 0x0452, 0x800F, 0x03ED, 0x800C, 0x0389, 0x800A, 0x0324, 0x8008, 0x02C0, 0x8006, 0x025B, 0x8004, 0x01F7, 0x8002, 0x0192, 0x8001, 0x012E, 0x8001, 0x00C9, 0x8000, 0x0065, 0x8000, 0x0000, 0x8000, 0xFF9B, 0x8001, 0xFF37, 0x8001, 0xFED2, 0x8002, 0xFE6E, 0x8004, 0xFE09, 0x8006, 0xFDA5, 0x8008, 0xFD40, 0x800A, 0xFCDC, 0x800C, 0xFC77, 0x800F, 0xFC13, 0x8013, 0xFBAE, 0x8016, 0xFB4A, 0x801A, 0xFAE5, 0x801E, 0xFA81, 0x8023, 0xFA1D, 0x8027, 0xF9B8, 0x802D, 0xF954, 0x8032, 0xF8EF, 0x8038, 0xF88B, 0x803E, 0xF827, 0x8044, 0xF7C2, 0x804B, 0xF75E, 0x8052, 0xF6FA, 0x8059, 0xF695, 0x8060, 0xF631, 0x8068, 0xF5CD, 0x8070, 0xF569, 0x8079, 0xF505, 0x8082, 0xF4A0, 0x808B, 0xF43C, 0x8094, 0xF3D8, 0x809E, 0xF374, 0x80A8, 0xF310, 0x80B2, 0xF2AC, 0x80BD, 0xF248, 0x80C8, 0xF1E4, 0x80D3, 0xF180, 0x80DE, 0xF11C, 0x80EA, 0xF0B9, 0x80F6, 0xF055, 0x8103, 0xEFF1, 0x8110, 0xEF8D, 0x811D, 0xEF2A, 0x812A, 0xEEC6, 0x8138, 0xEE62, 0x8146, 0xEDFF, 0x8154, 0xED9B, 0x8163, 0xED38, 0x8172, 0xECD5, 0x8181, 0xEC71, 0x8190, 0xEC0E, 0x81A0, 0xEBAB, 0x81B0, 0xEB47, 0x81C1, 0xEAE4, 0x81D1, 0xEA81, 0x81E2, 0xEA1E, 0x81F4, 0xE9BB, 0x8205, 0xE958, 0x8217, 0xE8F5, 0x822A, 0xE892, 0x823C, 0xE82F, 0x824F, 0xE7CD, 0x8262, 0xE76A, 0x8276, 0xE707, 
0x8289, 0xE6A5, 0x829D, 0xE642, 0x82B2, 0xE5E0, 0x82C6, 0xE57D, 0x82DB, 0xE51B, 0x82F1, 0xE4B9, 0x8306, 0xE457, 0x831C, 0xE3F4, 0x8332, 0xE392, 0x8349, 0xE330, 0x8360, 0xE2CF, 0x8377, 0xE26D, 0x838E, 0xE20B, 0x83A6, 0xE1A9, 0x83BE, 0xE148, 0x83D6, 0xE0E6, 0x83EF, 0xE085, 0x8407, 0xE023, 0x8421, 0xDFC2, 0x843A, 0xDF61, 0x8454, 0xDEFF, 0x846E, 0xDE9E, 0x8488, 0xDE3D, 0x84A3, 0xDDDC, 0x84BE, 0xDD7C, 0x84D9, 0xDD1B, 0x84F5, 0xDCBA, 0x8511, 0xDC59, 0x852D, 0xDBF9, 0x8549, 0xDB99, 0x8566, 0xDB38, 0x8583, 0xDAD8, 0x85A0, 0xDA78, 0x85BE, 0xDA18, 0x85DC, 0xD9B8, 0x85FA, 0xD958, 0x8619, 0xD8F8, 0x8637, 0xD898, 0x8656, 0xD839, 0x8676, 0xD7D9, 0x8696, 0xD77A, 0x86B6, 0xD71B, 0x86D6, 0xD6BB, 0x86F6, 0xD65C, 0x8717, 0xD5FD, 0x8738, 0xD59E, 0x875A, 0xD53F, 0x877B, 0xD4E1, 0x879D, 0xD482, 0x87C0, 0xD424, 0x87E2, 0xD3C5, 0x8805, 0xD367, 0x8828, 0xD309, 0x884C, 0xD2AB, 0x8870, 0xD24D, 0x8894, 0xD1EF, 0x88B8, 0xD191, 0x88DD, 0xD134, 0x8902, 0xD0D6, 0x8927, 0xD079, 0x894C, 0xD01B, 0x8972, 0xCFBE, 0x8998, 0xCF61, 0x89BE, 0xCF04, 0x89E5, 0xCEA7, 0x8A0C, 0xCE4B, 0x8A33, 0xCDEE, 0x8A5A, 0xCD92, 0x8A82, 0xCD35, 0x8AAA, 0xCCD9, 0x8AD3, 0xCC7D, 0x8AFB, 0xCC21, 0x8B24, 0xCBC5, 0x8B4D, 0xCB69, 0x8B77, 0xCB0E, 0x8BA0, 0xCAB2, 0x8BCA, 0xCA57, 0x8BF5, 0xC9FC, 0x8C1F, 0xC9A1, 0x8C4A, 0xC946, 0x8C75, 0xC8EB, 0x8CA1, 0xC890, 0x8CCC, 0xC836, 0x8CF8, 0xC7DB, 0x8D24, 0xC781, 0x8D51, 0xC727, 0x8D7E, 0xC6CD, 0x8DAB, 0xC673, 0x8DD8, 0xC619, 0x8E06, 0xC5C0, 0x8E34, 0xC566, 0x8E62, 0xC50D, 0x8E90, 0xC4B4, 0x8EBF, 0xC45B, 0x8EEE, 0xC402, 0x8F1D, 0xC3A9, 0x8F4D, 0xC351, 0x8F7D, 0xC2F8, 0x8FAD, 0xC2A0, 0x8FDD, 0xC248, 0x900E, 0xC1F0, 0x903E, 0xC198, 0x9070, 0xC140, 0x90A1, 0xC0E9, 0x90D3, 0xC091, 0x9105, 0xC03A, 0x9137, 0xBFE3, 0x9169, 0xBF8C, 0x919C, 0xBF35, 0x91CF, 0xBEDF, 0x9202, 0xBE88, 0x9236, 0xBE32, 0x926A, 0xBDDC, 0x929E, 0xBD86, 0x92D2, 0xBD30, 0x9307, 0xBCDA, 0x933C, 0xBC85, 0x9371, 0xBC2F, 0x93A6, 0xBBDA, 0x93DC, 0xBB85, 0x9412, 0xBB30, 0x9448, 0xBADC, 0x947E, 0xBA87, 0x94B5, 0xBA33, 0x94EC, 0xB9DF, 
0x9523, 0xB98B, 0x955B, 0xB937, 0x9592, 0xB8E3, 0x95CA, 0xB890, 0x9603, 0xB83C, 0x963B, 0xB7E9, 0x9674, 0xB796, 0x96AD, 0xB743, 0x96E6, 0xB6F1, 0x9720, 0xB69E, 0x9759, 0xB64C, 0x9793, 0xB5FA, 0x97CE, 0xB5A8, 0x9808, 0xB556, 0x9843, 0xB505, 0x987E, 0xB4B3, 0x98B9, 0xB462, 0x98F5, 0xB411, 0x9930, 0xB3C0, 0x996D, 0xB36F, 0x99A9, 0xB31F, 0x99E5, 0xB2CF, 0x9A22, 0xB27F, 0x9A5F, 0xB22F, 0x9A9C, 0xB1DF, 0x9ADA, 0xB18F, 0x9B17, 0xB140, 0x9B55, 0xB0F1, 0x9B94, 0xB0A2, 0x9BD2, 0xB053, 0x9C11, 0xB005, 0x9C50, 0xAFB6, 0x9C8F, 0xAF68, 0x9CCE, 0xAF1A, 0x9D0E, 0xAECC, 0x9D4E, 0xAE7F, 0x9D8E, 0xAE31, 0x9DCE, 0xADE4, 0x9E0F, 0xAD97, 0x9E50, 0xAD4A, 0x9E91, 0xACFD, 0x9ED2, 0xACB1, 0x9F14, 0xAC65, 0x9F56, 0xAC19, 0x9F98, 0xABCD, 0x9FDA, 0xAB81, 0xA01C, 0xAB36, 0xA05F, 0xAAEB, 0xA0A2, 0xAAA0, 0xA0E5, 0xAA55, 0xA129, 0xAA0A, 0xA16C, 0xA9C0, 0xA1B0, 0xA976, 0xA1F4, 0xA92C, 0xA238, 0xA8E2, 0xA27D, 0xA899, 0xA2C2, 0xA84F, 0xA307, 0xA806, 0xA34C, 0xA7BD, 0xA391, 0xA774, 0xA3D7, 0xA72C, 0xA41D, 0xA6E4, 0xA463, 0xA69C, 0xA4A9, 0xA654, 0xA4F0, 0xA60C, 0xA537, 0xA5C5, 0xA57E, 0xA57E, 0xA5C5, 0xA537, 0xA60C, 0xA4F0, 0xA654, 0xA4A9, 0xA69C, 0xA463, 0xA6E4, 0xA41D, 0xA72C, 0xA3D7, 0xA774, 0xA391, 0xA7BD, 0xA34C, 0xA806, 0xA307, 0xA84F, 0xA2C2, 0xA899, 0xA27D, 0xA8E2, 0xA238, 0xA92C, 0xA1F4, 0xA976, 0xA1B0, 0xA9C0, 0xA16C, 0xAA0A, 0xA129, 0xAA55, 0xA0E5, 0xAAA0, 0xA0A2, 0xAAEB, 0xA05F, 0xAB36, 0xA01C, 0xAB81, 0x9FDA, 0xABCD, 0x9F98, 0xAC19, 0x9F56, 0xAC65, 0x9F14, 0xACB1, 0x9ED2, 0xACFD, 0x9E91, 0xAD4A, 0x9E50, 0xAD97, 0x9E0F, 0xADE4, 0x9DCE, 0xAE31, 0x9D8E, 0xAE7F, 0x9D4E, 0xAECC, 0x9D0E, 0xAF1A, 0x9CCE, 0xAF68, 0x9C8F, 0xAFB6, 0x9C50, 0xB005, 0x9C11, 0xB053, 0x9BD2, 0xB0A2, 0x9B94, 0xB0F1, 0x9B55, 0xB140, 0x9B17, 0xB18F, 0x9ADA, 0xB1DF, 0x9A9C, 0xB22F, 0x9A5F, 0xB27F, 0x9A22, 0xB2CF, 0x99E5, 0xB31F, 0x99A9, 0xB36F, 0x996D, 0xB3C0, 0x9930, 0xB411, 0x98F5, 0xB462, 0x98B9, 0xB4B3, 0x987E, 0xB505, 0x9843, 0xB556, 0x9808, 0xB5A8, 0x97CE, 0xB5FA, 0x9793, 0xB64C, 0x9759, 0xB69E, 0x9720, 0xB6F1, 0x96E6, 
0xB743, 0x96AD, 0xB796, 0x9674, 0xB7E9, 0x963B, 0xB83C, 0x9603, 0xB890, 0x95CA, 0xB8E3, 0x9592, 0xB937, 0x955B, 0xB98B, 0x9523, 0xB9DF, 0x94EC, 0xBA33, 0x94B5, 0xBA87, 0x947E, 0xBADC, 0x9448, 0xBB30, 0x9412, 0xBB85, 0x93DC, 0xBBDA, 0x93A6, 0xBC2F, 0x9371, 0xBC85, 0x933C, 0xBCDA, 0x9307, 0xBD30, 0x92D2, 0xBD86, 0x929E, 0xBDDC, 0x926A, 0xBE32, 0x9236, 0xBE88, 0x9202, 0xBEDF, 0x91CF, 0xBF35, 0x919C, 0xBF8C, 0x9169, 0xBFE3, 0x9137, 0xC03A, 0x9105, 0xC091, 0x90D3, 0xC0E9, 0x90A1, 0xC140, 0x9070, 0xC198, 0x903E, 0xC1F0, 0x900E, 0xC248, 0x8FDD, 0xC2A0, 0x8FAD, 0xC2F8, 0x8F7D, 0xC351, 0x8F4D, 0xC3A9, 0x8F1D, 0xC402, 0x8EEE, 0xC45B, 0x8EBF, 0xC4B4, 0x8E90, 0xC50D, 0x8E62, 0xC566, 0x8E34, 0xC5C0, 0x8E06, 0xC619, 0x8DD8, 0xC673, 0x8DAB, 0xC6CD, 0x8D7E, 0xC727, 0x8D51, 0xC781, 0x8D24, 0xC7DB, 0x8CF8, 0xC836, 0x8CCC, 0xC890, 0x8CA1, 0xC8EB, 0x8C75, 0xC946, 0x8C4A, 0xC9A1, 0x8C1F, 0xC9FC, 0x8BF5, 0xCA57, 0x8BCA, 0xCAB2, 0x8BA0, 0xCB0E, 0x8B77, 0xCB69, 0x8B4D, 0xCBC5, 0x8B24, 0xCC21, 0x8AFB, 0xCC7D, 0x8AD3, 0xCCD9, 0x8AAA, 0xCD35, 0x8A82, 0xCD92, 0x8A5A, 0xCDEE, 0x8A33, 0xCE4B, 0x8A0C, 0xCEA7, 0x89E5, 0xCF04, 0x89BE, 0xCF61, 0x8998, 0xCFBE, 0x8972, 0xD01B, 0x894C, 0xD079, 0x8927, 0xD0D6, 0x8902, 0xD134, 0x88DD, 0xD191, 0x88B8, 0xD1EF, 0x8894, 0xD24D, 0x8870, 0xD2AB, 0x884C, 0xD309, 0x8828, 0xD367, 0x8805, 0xD3C5, 0x87E2, 0xD424, 0x87C0, 0xD482, 0x879D, 0xD4E1, 0x877B, 0xD53F, 0x875A, 0xD59E, 0x8738, 0xD5FD, 0x8717, 0xD65C, 0x86F6, 0xD6BB, 0x86D6, 0xD71B, 0x86B6, 0xD77A, 0x8696, 0xD7D9, 0x8676, 0xD839, 0x8656, 0xD898, 0x8637, 0xD8F8, 0x8619, 0xD958, 0x85FA, 0xD9B8, 0x85DC, 0xDA18, 0x85BE, 0xDA78, 0x85A0, 0xDAD8, 0x8583, 0xDB38, 0x8566, 0xDB99, 0x8549, 0xDBF9, 0x852D, 0xDC59, 0x8511, 0xDCBA, 0x84F5, 0xDD1B, 0x84D9, 0xDD7C, 0x84BE, 0xDDDC, 0x84A3, 0xDE3D, 0x8488, 0xDE9E, 0x846E, 0xDEFF, 0x8454, 0xDF61, 0x843A, 0xDFC2, 0x8421, 0xE023, 0x8407, 0xE085, 0x83EF, 0xE0E6, 0x83D6, 0xE148, 0x83BE, 0xE1A9, 0x83A6, 0xE20B, 0x838E, 0xE26D, 0x8377, 0xE2CF, 0x8360, 0xE330, 0x8349, 0xE392, 0x8332, 
0xE3F4, 0x831C, 0xE457, 0x8306, 0xE4B9, 0x82F1, 0xE51B, 0x82DB, 0xE57D, 0x82C6, 0xE5E0, 0x82B2, 0xE642, 0x829D, 0xE6A5, 0x8289, 0xE707, 0x8276, 0xE76A, 0x8262, 0xE7CD, 0x824F, 0xE82F, 0x823C, 0xE892, 0x822A, 0xE8F5, 0x8217, 0xE958, 0x8205, 0xE9BB, 0x81F4, 0xEA1E, 0x81E2, 0xEA81, 0x81D1, 0xEAE4, 0x81C1, 0xEB47, 0x81B0, 0xEBAB, 0x81A0, 0xEC0E, 0x8190, 0xEC71, 0x8181, 0xECD5, 0x8172, 0xED38, 0x8163, 0xED9B, 0x8154, 0xEDFF, 0x8146, 0xEE62, 0x8138, 0xEEC6, 0x812A, 0xEF2A, 0x811D, 0xEF8D, 0x8110, 0xEFF1, 0x8103, 0xF055, 0x80F6, 0xF0B9, 0x80EA, 0xF11C, 0x80DE, 0xF180, 0x80D3, 0xF1E4, 0x80C8, 0xF248, 0x80BD, 0xF2AC, 0x80B2, 0xF310, 0x80A8, 0xF374, 0x809E, 0xF3D8, 0x8094, 0xF43C, 0x808B, 0xF4A0, 0x8082, 0xF505, 0x8079, 0xF569, 0x8070, 0xF5CD, 0x8068, 0xF631, 0x8060, 0xF695, 0x8059, 0xF6FA, 0x8052, 0xF75E, 0x804B, 0xF7C2, 0x8044, 0xF827, 0x803E, 0xF88B, 0x8038, 0xF8EF, 0x8032, 0xF954, 0x802D, 0xF9B8, 0x8027, 0xFA1D, 0x8023, 0xFA81, 0x801E, 0xFAE5, 0x801A, 0xFB4A, 0x8016, 0xFBAE, 0x8013, 0xFC13, 0x800F, 0xFC77, 0x800C, 0xFCDC, 0x800A, 0xFD40, 0x8008, 0xFDA5, 0x8006, 0xFE09, 0x8004, 0xFE6E, 0x8002, 0xFED2, 0x8001, 0xFF37, 0x8001, 0xFF9B, 0x8000, 0x0000, 0x8000, 0x0065, 0x8000, 0x00C9, 0x8001, 0x012E, 0x8001, 0x0192, 0x8002, 0x01F7, 0x8004, 0x025B, 0x8006, 0x02C0, 0x8008, 0x0324, 0x800A, 0x0389, 0x800C, 0x03ED, 0x800F, 0x0452, 0x8013, 0x04B6, 0x8016, 0x051B, 0x801A, 0x057F, 0x801E, 0x05E3, 0x8023, 0x0648, 0x8027, 0x06AC, 0x802D, 0x0711, 0x8032, 0x0775, 0x8038, 0x07D9, 0x803E, 0x083E, 0x8044, 0x08A2, 0x804B, 0x0906, 0x8052, 0x096B, 0x8059, 0x09CF, 0x8060, 0x0A33, 0x8068, 0x0A97, 0x8070, 0x0AFB, 0x8079, 0x0B60, 0x8082, 0x0BC4, 0x808B, 0x0C28, 0x8094, 0x0C8C, 0x809E, 0x0CF0, 0x80A8, 0x0D54, 0x80B2, 0x0DB8, 0x80BD, 0x0E1C, 0x80C8, 0x0E80, 0x80D3, 0x0EE4, 0x80DE, 0x0F47, 0x80EA, 0x0FAB, 0x80F6, 0x100F, 0x8103, 0x1073, 0x8110, 0x10D6, 0x811D, 0x113A, 0x812A, 0x119E, 0x8138, 0x1201, 0x8146, 0x1265, 0x8154, 0x12C8, 0x8163, 0x132B, 0x8172, 0x138F, 0x8181, 0x13F2, 0x8190, 0x1455, 0x81A0, 
0x14B9, 0x81B0, 0x151C, 0x81C1, 0x157F, 0x81D1, 0x15E2, 0x81E2, 0x1645, 0x81F4, 0x16A8, 0x8205, 0x170B, 0x8217, 0x176E, 0x822A, 0x17D1, 0x823C, 0x1833, 0x824F, 0x1896, 0x8262, 0x18F9, 0x8276, 0x195B, 0x8289, 0x19BE, 0x829D, 0x1A20, 0x82B2, 0x1A83, 0x82C6, 0x1AE5, 0x82DB, 0x1B47, 0x82F1, 0x1BA9, 0x8306, 0x1C0C, 0x831C, 0x1C6E, 0x8332, 0x1CD0, 0x8349, 0x1D31, 0x8360, 0x1D93, 0x8377, 0x1DF5, 0x838E, 0x1E57, 0x83A6, 0x1EB8, 0x83BE, 0x1F1A, 0x83D6, 0x1F7B, 0x83EF, 0x1FDD, 0x8407, 0x203E, 0x8421, 0x209F, 0x843A, 0x2101, 0x8454, 0x2162, 0x846E, 0x21C3, 0x8488, 0x2224, 0x84A3, 0x2284, 0x84BE, 0x22E5, 0x84D9, 0x2346, 0x84F5, 0x23A7, 0x8511, 0x2407, 0x852D, 0x2467, 0x8549, 0x24C8, 0x8566, 0x2528, 0x8583, 0x2588, 0x85A0, 0x25E8, 0x85BE, 0x2648, 0x85DC, 0x26A8, 0x85FA, 0x2708, 0x8619, 0x2768, 0x8637, 0x27C7, 0x8656, 0x2827, 0x8676, 0x2886, 0x8696, 0x28E5, 0x86B6, 0x2945, 0x86D6, 0x29A4, 0x86F6, 0x2A03, 0x8717, 0x2A62, 0x8738, 0x2AC1, 0x875A, 0x2B1F, 0x877B, 0x2B7E, 0x879D, 0x2BDC, 0x87C0, 0x2C3B, 0x87E2, 0x2C99, 0x8805, 0x2CF7, 0x8828, 0x2D55, 0x884C, 0x2DB3, 0x8870, 0x2E11, 0x8894, 0x2E6F, 0x88B8, 0x2ECC, 0x88DD, 0x2F2A, 0x8902, 0x2F87, 0x8927, 0x2FE5, 0x894C, 0x3042, 0x8972, 0x309F, 0x8998, 0x30FC, 0x89BE, 0x3159, 0x89E5, 0x31B5, 0x8A0C, 0x3212, 0x8A33, 0x326E, 0x8A5A, 0x32CB, 0x8A82, 0x3327, 0x8AAA, 0x3383, 0x8AD3, 0x33DF, 0x8AFB, 0x343B, 0x8B24, 0x3497, 0x8B4D, 0x34F2, 0x8B77, 0x354E, 0x8BA0, 0x35A9, 0x8BCA, 0x3604, 0x8BF5, 0x365F, 0x8C1F, 0x36BA, 0x8C4A, 0x3715, 0x8C75, 0x3770, 0x8CA1, 0x37CA, 0x8CCC, 0x3825, 0x8CF8, 0x387F, 0x8D24, 0x38D9, 0x8D51, 0x3933, 0x8D7E, 0x398D, 0x8DAB, 0x39E7, 0x8DD8, 0x3A40, 0x8E06, 0x3A9A, 0x8E34, 0x3AF3, 0x8E62, 0x3B4C, 0x8E90, 0x3BA5, 0x8EBF, 0x3BFE, 0x8EEE, 0x3C57, 0x8F1D, 0x3CAF, 0x8F4D, 0x3D08, 0x8F7D, 0x3D60, 0x8FAD, 0x3DB8, 0x8FDD, 0x3E10, 0x900E, 0x3E68, 0x903E, 0x3EC0, 0x9070, 0x3F17, 0x90A1, 0x3F6F, 0x90D3, 0x3FC6, 0x9105, 0x401D, 0x9137, 0x4074, 0x9169, 0x40CB, 0x919C, 0x4121, 0x91CF, 0x4178, 0x9202, 0x41CE, 0x9236, 0x4224, 0x926A, 
0x427A, 0x929E, 0x42D0, 0x92D2, 0x4326, 0x9307, 0x437B, 0x933C, 0x43D1, 0x9371, 0x4426, 0x93A6, 0x447B, 0x93DC, 0x44D0, 0x9412, 0x4524, 0x9448, 0x4579, 0x947E, 0x45CD, 0x94B5, 0x4621, 0x94EC, 0x4675, 0x9523, 0x46C9, 0x955B, 0x471D, 0x9592, 0x4770, 0x95CA, 0x47C4, 0x9603, 0x4817, 0x963B, 0x486A, 0x9674, 0x48BD, 0x96AD, 0x490F, 0x96E6, 0x4962, 0x9720, 0x49B4, 0x9759, 0x4A06, 0x9793, 0x4A58, 0x97CE, 0x4AAA, 0x9808, 0x4AFB, 0x9843, 0x4B4D, 0x987E, 0x4B9E, 0x98B9, 0x4BEF, 0x98F5, 0x4C40, 0x9930, 0x4C91, 0x996D, 0x4CE1, 0x99A9, 0x4D31, 0x99E5, 0x4D81, 0x9A22, 0x4DD1, 0x9A5F, 0x4E21, 0x9A9C, 0x4E71, 0x9ADA, 0x4EC0, 0x9B17, 0x4F0F, 0x9B55, 0x4F5E, 0x9B94, 0x4FAD, 0x9BD2, 0x4FFB, 0x9C11, 0x504A, 0x9C50, 0x5098, 0x9C8F, 0x50E6, 0x9CCE, 0x5134, 0x9D0E, 0x5181, 0x9D4E, 0x51CF, 0x9D8E, 0x521C, 0x9DCE, 0x5269, 0x9E0F, 0x52B6, 0x9E50, 0x5303, 0x9E91, 0x534F, 0x9ED2, 0x539B, 0x9F14, 0x53E7, 0x9F56, 0x5433, 0x9F98, 0x547F, 0x9FDA, 0x54CA, 0xA01C, 0x5515, 0xA05F, 0x5560, 0xA0A2, 0x55AB, 0xA0E5, 0x55F6, 0xA129, 0x5640, 0xA16C, 0x568A, 0xA1B0, 0x56D4, 0xA1F4, 0x571E, 0xA238, 0x5767, 0xA27D, 0x57B1, 0xA2C2, 0x57FA, 0xA307, 0x5843, 0xA34C, 0x588C, 0xA391, 0x58D4, 0xA3D7, 0x591C, 0xA41D, 0x5964, 0xA463, 0x59AC, 0xA4A9, 0x59F4, 0xA4F0, 0x5A3B, 0xA537, 0x5A82, 0xA57E, 0x5AC9, 0xA5C5, 0x5B10, 0xA60C, 0x5B57, 0xA654, 0x5B9D, 0xA69C, 0x5BE3, 0xA6E4, 0x5C29, 0xA72C, 0x5C6F, 0xA774, 0x5CB4, 0xA7BD, 0x5CF9, 0xA806, 0x5D3E, 0xA84F, 0x5D83, 0xA899, 0x5DC8, 0xA8E2, 0x5E0C, 0xA92C, 0x5E50, 0xA976, 0x5E94, 0xA9C0, 0x5ED7, 0xAA0A, 0x5F1B, 0xAA55, 0x5F5E, 0xAAA0, 0x5FA1, 0xAAEB, 0x5FE4, 0xAB36, 0x6026, 0xAB81, 0x6068, 0xABCD, 0x60AA, 0xAC19, 0x60EC, 0xAC65, 0x612E, 0xACB1, 0x616F, 0xACFD, 0x61B0, 0xAD4A, 0x61F1, 0xAD97, 0x6232, 0xADE4, 0x6272, 0xAE31, 0x62B2, 0xAE7F, 0x62F2, 0xAECC, 0x6332, 0xAF1A, 0x6371, 0xAF68, 0x63B0, 0xAFB6, 0x63EF, 0xB005, 0x642E, 0xB053, 0x646C, 0xB0A2, 0x64AB, 0xB0F1, 0x64E9, 0xB140, 0x6526, 0xB18F, 0x6564, 0xB1DF, 0x65A1, 0xB22F, 0x65DE, 0xB27F, 0x661B, 0xB2CF, 0x6657, 0xB31F, 
0x6693, 0xB36F, 0x66D0, 0xB3C0, 0x670B, 0xB411, 0x6747, 0xB462, 0x6782, 0xB4B3, 0x67BD, 0xB505, 0x67F8, 0xB556, 0x6832, 0xB5A8, 0x686D, 0xB5FA, 0x68A7, 0xB64C, 0x68E0, 0xB69E, 0x691A, 0xB6F1, 0x6953, 0xB743, 0x698C, 0xB796, 0x69C5, 0xB7E9, 0x69FD, 0xB83C, 0x6A36, 0xB890, 0x6A6E, 0xB8E3, 0x6AA5, 0xB937, 0x6ADD, 0xB98B, 0x6B14, 0xB9DF, 0x6B4B, 0xBA33, 0x6B82, 0xBA87, 0x6BB8, 0xBADC, 0x6BEE, 0xBB30, 0x6C24, 0xBB85, 0x6C5A, 0xBBDA, 0x6C8F, 0xBC2F, 0x6CC4, 0xBC85, 0x6CF9, 0xBCDA, 0x6D2E, 0xBD30, 0x6D62, 0xBD86, 0x6D96, 0xBDDC, 0x6DCA, 0xBE32, 0x6DFE, 0xBE88, 0x6E31, 0xBEDF, 0x6E64, 0xBF35, 0x6E97, 0xBF8C, 0x6EC9, 0xBFE3, 0x6EFB, 0xC03A, 0x6F2D, 0xC091, 0x6F5F, 0xC0E9, 0x6F90, 0xC140, 0x6FC2, 0xC198, 0x6FF2, 0xC1F0, 0x7023, 0xC248, 0x7053, 0xC2A0, 0x7083, 0xC2F8, 0x70B3, 0xC351, 0x70E3, 0xC3A9, 0x7112, 0xC402, 0x7141, 0xC45B, 0x7170, 0xC4B4, 0x719E, 0xC50D, 0x71CC, 0xC566, 0x71FA, 0xC5C0, 0x7228, 0xC619, 0x7255, 0xC673, 0x7282, 0xC6CD, 0x72AF, 0xC727, 0x72DC, 0xC781, 0x7308, 0xC7DB, 0x7334, 0xC836, 0x735F, 0xC890, 0x738B, 0xC8EB, 0x73B6, 0xC946, 0x73E1, 0xC9A1, 0x740B, 0xC9FC, 0x7436, 0xCA57, 0x7460, 0xCAB2, 0x7489, 0xCB0E, 0x74B3, 0xCB69, 0x74DC, 0xCBC5, 0x7505, 0xCC21, 0x752D, 0xCC7D, 0x7556, 0xCCD9, 0x757E, 0xCD35, 0x75A6, 0xCD92, 0x75CD, 0xCDEE, 0x75F4, 0xCE4B, 0x761B, 0xCEA7, 0x7642, 0xCF04, 0x7668, 0xCF61, 0x768E, 0xCFBE, 0x76B4, 0xD01B, 0x76D9, 0xD079, 0x76FE, 0xD0D6, 0x7723, 0xD134, 0x7748, 0xD191, 0x776C, 0xD1EF, 0x7790, 0xD24D, 0x77B4, 0xD2AB, 0x77D8, 0xD309, 0x77FB, 0xD367, 0x781E, 0xD3C5, 0x7840, 0xD424, 0x7863, 0xD482, 0x7885, 0xD4E1, 0x78A6, 0xD53F, 0x78C8, 0xD59E, 0x78E9, 0xD5FD, 0x790A, 0xD65C, 0x792A, 0xD6BB, 0x794A, 0xD71B, 0x796A, 0xD77A, 0x798A, 0xD7D9, 0x79AA, 0xD839, 0x79C9, 0xD898, 0x79E7, 0xD8F8, 0x7A06, 0xD958, 0x7A24, 0xD9B8, 0x7A42, 0xDA18, 0x7A60, 0xDA78, 0x7A7D, 0xDAD8, 0x7A9A, 0xDB38, 0x7AB7, 0xDB99, 0x7AD3, 0xDBF9, 0x7AEF, 0xDC59, 0x7B0B, 0xDCBA, 0x7B27, 0xDD1B, 0x7B42, 0xDD7C, 0x7B5D, 0xDDDC, 0x7B78, 0xDE3D, 0x7B92, 0xDE9E, 0x7BAC, 0xDEFF, 
0x7BC6, 0xDF61, 0x7BDF, 0xDFC2, 0x7BF9, 0xE023, 0x7C11, 0xE085, 0x7C2A, 0xE0E6, 0x7C42, 0xE148, 0x7C5A, 0xE1A9, 0x7C72, 0xE20B, 0x7C89, 0xE26D, 0x7CA0, 0xE2CF, 0x7CB7, 0xE330, 0x7CCE, 0xE392, 0x7CE4, 0xE3F4, 0x7CFA, 0xE457, 0x7D0F, 0xE4B9, 0x7D25, 0xE51B, 0x7D3A, 0xE57D, 0x7D4E, 0xE5E0, 0x7D63, 0xE642, 0x7D77, 0xE6A5, 0x7D8A, 0xE707, 0x7D9E, 0xE76A, 0x7DB1, 0xE7CD, 0x7DC4, 0xE82F, 0x7DD6, 0xE892, 0x7DE9, 0xE8F5, 0x7DFB, 0xE958, 0x7E0C, 0xE9BB, 0x7E1E, 0xEA1E, 0x7E2F, 0xEA81, 0x7E3F, 0xEAE4, 0x7E50, 0xEB47, 0x7E60, 0xEBAB, 0x7E70, 0xEC0E, 0x7E7F, 0xEC71, 0x7E8E, 0xECD5, 0x7E9D, 0xED38, 0x7EAC, 0xED9B, 0x7EBA, 0xEDFF, 0x7EC8, 0xEE62, 0x7ED6, 0xEEC6, 0x7EE3, 0xEF2A, 0x7EF0, 0xEF8D, 0x7EFD, 0xEFF1, 0x7F0A, 0xF055, 0x7F16, 0xF0B9, 0x7F22, 0xF11C, 0x7F2D, 0xF180, 0x7F38, 0xF1E4, 0x7F43, 0xF248, 0x7F4E, 0xF2AC, 0x7F58, 0xF310, 0x7F62, 0xF374, 0x7F6C, 0xF3D8, 0x7F75, 0xF43C, 0x7F7E, 0xF4A0, 0x7F87, 0xF505, 0x7F90, 0xF569, 0x7F98, 0xF5CD, 0x7FA0, 0xF631, 0x7FA7, 0xF695, 0x7FAE, 0xF6FA, 0x7FB5, 0xF75E, 0x7FBC, 0xF7C2, 0x7FC2, 0xF827, 0x7FC8, 0xF88B, 0x7FCE, 0xF8EF, 0x7FD3, 0xF954, 0x7FD9, 0xF9B8, 0x7FDD, 0xFA1D, 0x7FE2, 0xFA81, 0x7FE6, 0xFAE5, 0x7FEA, 0xFB4A, 0x7FED, 0xFBAE, 0x7FF1, 0xFC13, 0x7FF4, 0xFC77, 0x7FF6, 0xFCDC, 0x7FF8, 0xFD40, 0x7FFA, 0xFDA5, 0x7FFC, 0xFE09, 0x7FFE, 0xFE6E, 0x7FFF, 0xFED2, 0x7FFF, 0xFF37, 0x7FFF, 0xFF9B,
};
#elif defined(MAX_FFT_SIZE_1024)
/* Same interleaved Q15 (cos, sin) twiddle layout as the 2048-point table
 * above, sampled in steps of 2*pi/1024 (table continues below). */
const INT16 radix4FftTwiddleArr[2*MAX_FFT_SIZE] = {0X7FFF,0X0000,0X7FFF,0X00C9,0X7FFE,0X0192,0X7FFA,0X025B,0X7FF6,0X0324,0X7FF1,0X03ED,0X7FEA,0X04B6,0X7FE2,0X057F,0X7FD9,0X0648,0X7FCE,0X0711, 0X7FC2,0X07D9,0X7FB5,0X08A2,0X7FA7,0X096B,0X7F98,0X0A33,0X7F87,0X0AFB,0X7F75,0X0BC4,0X7F62,0X0C8C,0X7F4E,0X0D54,0X7F38,0X0E1C,0X7F22,0X0EE4, 0X7F0A,0X0FAB,0X7EF0,0X1073,0X7ED6,0X113A,0X7EBA,0X1201,0X7E9D,0X12C8,0X7E7F,0X138F,0X7E60,0X1455,0X7E3F,0X151C,0X7E1E,0X15E2,0X7DFB,0X16A8, 0X7DD6,0X176E,0X7DB1,0X1833,0X7D8A,0X18F9,0X7D63,0X19BE,0X7D3A,0X1A83,0X7D0F,0X1B47,0X7CE4,0X1C0C,0X7CB7,0X1CD0,0X7C89,0X1D93,0X7C5A,0X1E57, 
0X7C2A,0X1F1A,0X7BF9,0X1FDD,0X7BC6,0X209F,0X7B92,0X2162,0X7B5D,0X2224,0X7B27,0X22E5,0X7AEF,0X23A7,0X7AB7,0X2467,0X7A7D,0X2528,0X7A42,0X25E8, 0X7A06,0X26A8,0X79C9,0X2768,0X798A,0X2827,0X794A,0X28E5,0X790A,0X29A4,0X78C8,0X2A62,0X7885,0X2B1F,0X7840,0X2BDC,0X77FB,0X2C99,0X77B4,0X2D55, 0X776C,0X2E11,0X7723,0X2ECC,0X76D9,0X2F87,0X768E,0X3042,0X7642,0X30FC,0X75F4,0X31B5,0X75A6,0X326E,0X7556,0X3327,0X7505,0X33DF,0X74B3,0X3497, 0X7460,0X354E,0X740B,0X3604,0X73B6,0X36BA,0X735F,0X3770,0X7308,0X3825,0X72AF,0X38D9,0X7255,0X398D,0X71FA,0X3A40,0X719E,0X3AF3,0X7141,0X3BA5, 0X70E3,0X3C57,0X7083,0X3D08,0X7023,0X3DB8,0X6FC2,0X3E68,0X6F5F,0X3F17,0X6EFB,0X3FC6,0X6E97,0X4074,0X6E31,0X4121,0X6DCA,0X41CE,0X6D62,0X427A, 0X6CF9,0X4326,0X6C8F,0X43D1,0X6C24,0X447B,0X6BB8,0X4524,0X6B4B,0X45CD,0X6ADD,0X4675,0X6A6E,0X471D,0X69FD,0X47C4,0X698C,0X486A,0X691A,0X490F, 0X68A7,0X49B4,0X6832,0X4A58,0X67BD,0X4AFB,0X6747,0X4B9E,0X66D0,0X4C40,0X6657,0X4CE1,0X65DE,0X4D81,0X6564,0X4E21,0X64E9,0X4EC0,0X646C,0X4F5E, 0X63EF,0X4FFB,0X6371,0X5098,0X62F2,0X5134,0X6272,0X51CF,0X61F1,0X5269,0X616F,0X5303,0X60EC,0X539B,0X6068,0X5433,0X5FE4,0X54CA,0X5F5E,0X5560, 0X5ED7,0X55F6,0X5E50,0X568A,0X5DC8,0X571E,0X5D3E,0X57B1,0X5CB4,0X5843,0X5C29,0X58D4,0X5B9D,0X5964,0X5B10,0X59F4,0X5A82,0X5A82,0X59F4,0X5B10, 0X5964,0X5B9D,0X58D4,0X5C29,0X5843,0X5CB4,0X57B1,0X5D3E,0X571E,0X5DC8,0X568A,0X5E50,0X55F6,0X5ED7,0X5560,0X5F5E,0X54CA,0X5FE4,0X5433,0X6068, 0X539B,0X60EC,0X5303,0X616F,0X5269,0X61F1,0X51CF,0X6272,0X5134,0X62F2,0X5098,0X6371,0X4FFB,0X63EF,0X4F5E,0X646C,0X4EC0,0X64E9,0X4E21,0X6564, 0X4D81,0X65DE,0X4CE1,0X6657,0X4C40,0X66D0,0X4B9E,0X6747,0X4AFB,0X67BD,0X4A58,0X6832,0X49B4,0X68A7,0X490F,0X691A,0X486A,0X698C,0X47C4,0X69FD, 0X471D,0X6A6E,0X4675,0X6ADD,0X45CD,0X6B4B,0X4524,0X6BB8,0X447B,0X6C24,0X43D1,0X6C8F,0X4326,0X6CF9,0X427A,0X6D62,0X41CE,0X6DCA,0X4121,0X6E31, 0X4074,0X6E97,0X3FC6,0X6EFB,0X3F17,0X6F5F,0X3E68,0X6FC2,0X3DB8,0X7023,0X3D08,0X7083,0X3C57,0X70E3,0X3BA5,0X7141,0X3AF3,0X719E,0X3A40,0X71FA, 
0X398D,0X7255,0X38D9,0X72AF,0X3825,0X7308,0X3770,0X735F,0X36BA,0X73B6,0X3604,0X740B,0X354E,0X7460,0X3497,0X74B3,0X33DF,0X7505,0X3327,0X7556, 0X326E,0X75A6,0X31B5,0X75F4,0X30FC,0X7642,0X3042,0X768E,0X2F87,0X76D9,0X2ECC,0X7723,0X2E11,0X776C,0X2D55,0X77B4,0X2C99,0X77FB,0X2BDC,0X7840, 0X2B1F,0X7885,0X2A62,0X78C8,0X29A4,0X790A,0X28E5,0X794A,0X2827,0X798A,0X2768,0X79C9,0X26A8,0X7A06,0X25E8,0X7A42,0X2528,0X7A7D,0X2467,0X7AB7, 0X23A7,0X7AEF,0X22E5,0X7B27,0X2224,0X7B5D,0X2162,0X7B92,0X209F,0X7BC6,0X1FDD,0X7BF9,0X1F1A,0X7C2A,0X1E57,0X7C5A,0X1D93,0X7C89,0X1CD0,0X7CB7, 0X1C0C,0X7CE4,0X1B47,0X7D0F,0X1A83,0X7D3A,0X19BE,0X7D63,0X18F9,0X7D8A,0X1833,0X7DB1,0X176E,0X7DD6,0X16A8,0X7DFB,0X15E2,0X7E1E,0X151C,0X7E3F, 0X1455,0X7E60,0X138F,0X7E7F,0X12C8,0X7E9D,0X1201,0X7EBA,0X113A,0X7ED6,0X1073,0X7EF0,0X0FAB,0X7F0A,0X0EE4,0X7F22,0X0E1C,0X7F38,0X0D54,0X7F4E, 0X0C8C,0X7F62,0X0BC4,0X7F75,0X0AFB,0X7F87,0X0A33,0X7F98,0X096B,0X7FA7,0X08A2,0X7FB5,0X07D9,0X7FC2,0X0711,0X7FCE,0X0648,0X7FD9,0X057F,0X7FE2, 0X04B6,0X7FEA,0X03ED,0X7FF1,0X0324,0X7FF6,0X025B,0X7FFA,0X0192,0X7FFE,0X00C9,0X7FFF,0X0000,0X7FFF,0XFF37,0X7FFF,0XFE6E,0X7FFE,0XFDA5,0X7FFA, 0XFCDC,0X7FF6,0XFC13,0X7FF1,0XFB4A,0X7FEA,0XFA81,0X7FE2,0XF9B8,0X7FD9,0XF8EF,0X7FCE,0XF827,0X7FC2,0XF75E,0X7FB5,0XF695,0X7FA7,0XF5CD,0X7F98, 0XF505,0X7F87,0XF43C,0X7F75,0XF374,0X7F62,0XF2AC,0X7F4E,0XF1E4,0X7F38,0XF11C,0X7F22,0XF055,0X7F0A,0XEF8D,0X7EF0,0XEEC6,0X7ED6,0XEDFF,0X7EBA, 0XED38,0X7E9D,0XEC71,0X7E7F,0XEBAB,0X7E60,0XEAE4,0X7E3F,0XEA1E,0X7E1E,0XE958,0X7DFB,0XE892,0X7DD6,0XE7CD,0X7DB1,0XE707,0X7D8A,0XE642,0X7D63, 0XE57D,0X7D3A,0XE4B9,0X7D0F,0XE3F4,0X7CE4,0XE330,0X7CB7,0XE26D,0X7C89,0XE1A9,0X7C5A,0XE0E6,0X7C2A,0XE023,0X7BF9,0XDF61,0X7BC6,0XDE9E,0X7B92, 0XDDDC,0X7B5D,0XDD1B,0X7B27,0XDC59,0X7AEF,0XDB99,0X7AB7,0XDAD8,0X7A7D,0XDA18,0X7A42,0XD958,0X7A06,0XD898,0X79C9,0XD7D9,0X798A,0XD71B,0X794A, 0XD65C,0X790A,0XD59E,0X78C8,0XD4E1,0X7885,0XD424,0X7840,0XD367,0X77FB,0XD2AB,0X77B4,0XD1EF,0X776C,0XD134,0X7723,0XD079,0X76D9,0XCFBE,0X768E, 
0XCF04,0X7642,0XCE4B,0X75F4,0XCD92,0X75A6,0XCCD9,0X7556,0XCC21,0X7505,0XCB69,0X74B3,0XCAB2,0X7460,0XC9FC,0X740B,0XC946,0X73B6,0XC890,0X735F, 0XC7DB,0X7308,0XC727,0X72AF,0XC673,0X7255,0XC5C0,0X71FA,0XC50D,0X719E,0XC45B,0X7141,0XC3A9,0X70E3,0XC2F8,0X7083,0XC248,0X7023,0XC198,0X6FC2, 0XC0E9,0X6F5F,0XC03A,0X6EFB,0XBF8C,0X6E97,0XBEDF,0X6E31,0XBE32,0X6DCA,0XBD86,0X6D62,0XBCDA,0X6CF9,0XBC2F,0X6C8F,0XBB85,0X6C24,0XBADC,0X6BB8, 0XBA33,0X6B4B,0XB98B,0X6ADD,0XB8E3,0X6A6E,0XB83C,0X69FD,0XB796,0X698C,0XB6F1,0X691A,0XB64C,0X68A7,0XB5A8,0X6832,0XB505,0X67BD,0XB462,0X6747, 0XB3C0,0X66D0,0XB31F,0X6657,0XB27F,0X65DE,0XB1DF,0X6564,0XB140,0X64E9,0XB0A2,0X646C,0XB005,0X63EF,0XAF68,0X6371,0XAECC,0X62F2,0XAE31,0X6272, 0XAD97,0X61F1,0XACFD,0X616F,0XAC65,0X60EC,0XABCD,0X6068,0XAB36,0X5FE4,0XAAA0,0X5F5E,0XAA0A,0X5ED7,0XA976,0X5E50,0XA8E2,0X5DC8,0XA84F,0X5D3E, 0XA7BD,0X5CB4,0XA72C,0X5C29,0XA69C,0X5B9D,0XA60C,0X5B10,0XA57E,0X5A82,0XA4F0,0X59F4,0XA463,0X5964,0XA3D7,0X58D4,0XA34C,0X5843,0XA2C2,0X57B1, 0XA238,0X571E,0XA1B0,0X568A,0XA129,0X55F6,0XA0A2,0X5560,0XA01C,0X54CA,0X9F98,0X5433,0X9F14,0X539B,0X9E91,0X5303,0X9E0F,0X5269,0X9D8E,0X51CF, 0X9D0E,0X5134,0X9C8F,0X5098,0X9C11,0X4FFB,0X9B94,0X4F5E,0X9B17,0X4EC0,0X9A9C,0X4E21,0X9A22,0X4D81,0X99A9,0X4CE1,0X9930,0X4C40,0X98B9,0X4B9E, 0X9843,0X4AFB,0X97CE,0X4A58,0X9759,0X49B4,0X96E6,0X490F,0X9674,0X486A,0X9603,0X47C4,0X9592,0X471D,0X9523,0X4675,0X94B5,0X45CD,0X9448,0X4524, 0X93DC,0X447B,0X9371,0X43D1,0X9307,0X4326,0X929E,0X427A,0X9236,0X41CE,0X91CF,0X4121,0X9169,0X4074,0X9105,0X3FC6,0X90A1,0X3F17,0X903E,0X3E68, 0X8FDD,0X3DB8,0X8F7D,0X3D08,0X8F1D,0X3C57,0X8EBF,0X3BA5,0X8E62,0X3AF3,0X8E06,0X3A40,0X8DAB,0X398D,0X8D51,0X38D9,0X8CF8,0X3825,0X8CA1,0X3770, 0X8C4A,0X36BA,0X8BF5,0X3604,0X8BA0,0X354E,0X8B4D,0X3497,0X8AFB,0X33DF,0X8AAA,0X3327,0X8A5A,0X326E,0X8A0C,0X31B5,0X89BE,0X30FC,0X8972,0X3042, 0X8927,0X2F87,0X88DD,0X2ECC,0X8894,0X2E11,0X884C,0X2D55,0X8805,0X2C99,0X87C0,0X2BDC,0X877B,0X2B1F,0X8738,0X2A62,0X86F6,0X29A4,0X86B6,0X28E5, 
0X8676,0X2827,0X8637,0X2768,0X85FA,0X26A8,0X85BE,0X25E8,0X8583,0X2528,0X8549,0X2467,0X8511,0X23A7,0X84D9,0X22E5,0X84A3,0X2224,0X846E,0X2162, 0X843A,0X209F,0X8407,0X1FDD,0X83D6,0X1F1A,0X83A6,0X1E57,0X8377,0X1D93,0X8349,0X1CD0,0X831C,0X1C0C,0X82F1,0X1B47,0X82C6,0X1A83,0X829D,0X19BE, 0X8276,0X18F9,0X824F,0X1833,0X822A,0X176E,0X8205,0X16A8,0X81E2,0X15E2,0X81C1,0X151C,0X81A0,0X1455,0X8181,0X138F,0X8163,0X12C8,0X8146,0X1201, 0X812A,0X113A,0X8110,0X1073,0X80F6,0X0FAB,0X80DE,0X0EE4,0X80C8,0X0E1C,0X80B2,0X0D54,0X809E,0X0C8C,0X808B,0X0BC4,0X8079,0X0AFB,0X8068,0X0A33, 0X8059,0X096B,0X804B,0X08A2,0X803E,0X07D9,0X8032,0X0711,0X8027,0X0648,0X801E,0X057F,0X8016,0X04B6,0X800F,0X03ED,0X800A,0X0324,0X8006,0X025B, 0X8002,0X0192,0X8001,0X00C9,0X8001,0X0000,0X8001,0XFF37,0X8002,0XFE6E,0X8006,0XFDA5,0X800A,0XFCDC,0X800F,0XFC13,0X8016,0XFB4A,0X801E,0XFA81, 0X8027,0XF9B8,0X8032,0XF8EF,0X803E,0XF827,0X804B,0XF75E,0X8059,0XF695,0X8068,0XF5CD,0X8079,0XF505,0X808B,0XF43C,0X809E,0XF374,0X80B2,0XF2AC, 0X80C8,0XF1E4,0X80DE,0XF11C,0X80F6,0XF055,0X8110,0XEF8D,0X812A,0XEEC6,0X8146,0XEDFF,0X8163,0XED38,0X8181,0XEC71,0X81A0,0XEBAB,0X81C1,0XEAE4, 0X81E2,0XEA1E,0X8205,0XE958,0X822A,0XE892,0X824F,0XE7CD,0X8276,0XE707,0X829D,0XE642,0X82C6,0XE57D,0X82F1,0XE4B9,0X831C,0XE3F4,0X8349,0XE330, 0X8377,0XE26D,0X83A6,0XE1A9,0X83D6,0XE0E6,0X8407,0XE023,0X843A,0XDF61,0X846E,0XDE9E,0X84A3,0XDDDC,0X84D9,0XDD1B,0X8511,0XDC59,0X8549,0XDB99, 0X8583,0XDAD8,0X85BE,0XDA18,0X85FA,0XD958,0X8637,0XD898,0X8676,0XD7D9,0X86B6,0XD71B,0X86F6,0XD65C,0X8738,0XD59E,0X877B,0XD4E1,0X87C0,0XD424, 0X8805,0XD367,0X884C,0XD2AB,0X8894,0XD1EF,0X88DD,0XD134,0X8927,0XD079,0X8972,0XCFBE,0X89BE,0XCF04,0X8A0C,0XCE4B,0X8A5A,0XCD92,0X8AAA,0XCCD9, 0X8AFB,0XCC21,0X8B4D,0XCB69,0X8BA0,0XCAB2,0X8BF5,0XC9FC,0X8C4A,0XC946,0X8CA1,0XC890,0X8CF8,0XC7DB,0X8D51,0XC727,0X8DAB,0XC673,0X8E06,0XC5C0, 0X8E62,0XC50D,0X8EBF,0XC45B,0X8F1D,0XC3A9,0X8F7D,0XC2F8,0X8FDD,0XC248,0X903E,0XC198,0X90A1,0XC0E9,0X9105,0XC03A,0X9169,0XBF8C,0X91CF,0XBEDF, 
0X9236,0XBE32,0X929E,0XBD86,0X9307,0XBCDA,0X9371,0XBC2F,0X93DC,0XBB85,0X9448,0XBADC,0X94B5,0XBA33,0X9523,0XB98B,0X9592,0XB8E3,0X9603,0XB83C, 0X9674,0XB796,0X96E6,0XB6F1,0X9759,0XB64C,0X97CE,0XB5A8,0X9843,0XB505,0X98B9,0XB462,0X9930,0XB3C0,0X99A9,0XB31F,0X9A22,0XB27F,0X9A9C,0XB1DF, 0X9B17,0XB140,0X9B94,0XB0A2,0X9C11,0XB005,0X9C8F,0XAF68,0X9D0E,0XAECC,0X9D8E,0XAE31,0X9E0F,0XAD97,0X9E91,0XACFD,0X9F14,0XAC65,0X9F98,0XABCD, 0XA01C,0XAB36,0XA0A2,0XAAA0,0XA129,0XAA0A,0XA1B0,0XA976,0XA238,0XA8E2,0XA2C2,0XA84F,0XA34C,0XA7BD,0XA3D7,0XA72C,0XA463,0XA69C,0XA4F0,0XA60C, 0XA57E,0XA57E,0XA60C,0XA4F0,0XA69C,0XA463,0XA72C,0XA3D7,0XA7BD,0XA34C,0XA84F,0XA2C2,0XA8E2,0XA238,0XA976,0XA1B0,0XAA0A,0XA129,0XAAA0,0XA0A2, 0XAB36,0XA01C,0XABCD,0X9F98,0XAC65,0X9F14,0XACFD,0X9E91,0XAD97,0X9E0F,0XAE31,0X9D8E,0XAECC,0X9D0E,0XAF68,0X9C8F,0XB005,0X9C11,0XB0A2,0X9B94, 0XB140,0X9B17,0XB1DF,0X9A9C,0XB27F,0X9A22,0XB31F,0X99A9,0XB3C0,0X9930,0XB462,0X98B9,0XB505,0X9843,0XB5A8,0X97CE,0XB64C,0X9759,0XB6F1,0X96E6, 0XB796,0X9674,0XB83C,0X9603,0XB8E3,0X9592,0XB98B,0X9523,0XBA33,0X94B5,0XBADC,0X9448,0XBB85,0X93DC,0XBC2F,0X9371,0XBCDA,0X9307,0XBD86,0X929E, 0XBE32,0X9236,0XBEDF,0X91CF,0XBF8C,0X9169,0XC03A,0X9105,0XC0E9,0X90A1,0XC198,0X903E,0XC248,0X8FDD,0XC2F8,0X8F7D,0XC3A9,0X8F1D,0XC45B,0X8EBF, 0XC50D,0X8E62,0XC5C0,0X8E06,0XC673,0X8DAB,0XC727,0X8D51,0XC7DB,0X8CF8,0XC890,0X8CA1,0XC946,0X8C4A,0XC9FC,0X8BF5,0XCAB2,0X8BA0,0XCB69,0X8B4D, 0XCC21,0X8AFB,0XCCD9,0X8AAA,0XCD92,0X8A5A,0XCE4B,0X8A0C,0XCF04,0X89BE,0XCFBE,0X8972,0XD079,0X8927,0XD134,0X88DD,0XD1EF,0X8894,0XD2AB,0X884C, 0XD367,0X8805,0XD424,0X87C0,0XD4E1,0X877B,0XD59E,0X8738,0XD65C,0X86F6,0XD71B,0X86B6,0XD7D9,0X8676,0XD898,0X8637,0XD958,0X85FA,0XDA18,0X85BE, 0XDAD8,0X8583,0XDB99,0X8549,0XDC59,0X8511,0XDD1B,0X84D9,0XDDDC,0X84A3,0XDE9E,0X846E,0XDF61,0X843A,0XE023,0X8407,0XE0E6,0X83D6,0XE1A9,0X83A6, 0XE26D,0X8377,0XE330,0X8349,0XE3F4,0X831C,0XE4B9,0X82F1,0XE57D,0X82C6,0XE642,0X829D,0XE707,0X8276,0XE7CD,0X824F,0XE892,0X822A,0XE958,0X8205, 
0XEA1E,0X81E2,0XEAE4,0X81C1,0XEBAB,0X81A0,0XEC71,0X8181,0XED38,0X8163,0XEDFF,0X8146,0XEEC6,0X812A,0XEF8D,0X8110,0XF055,0X80F6,0XF11C,0X80DE, 0XF1E4,0X80C8,0XF2AC,0X80B2,0XF374,0X809E,0XF43C,0X808B,0XF505,0X8079,0XF5CD,0X8068,0XF695,0X8059,0XF75E,0X804B,0XF827,0X803E,0XF8EF,0X8032, 0XF9B8,0X8027,0XFA81,0X801E,0XFB4A,0X8016,0XFC13,0X800F,0XFCDC,0X800A,0XFDA5,0X8006,0XFE6E,0X8002,0XFF37,0X8001,0X0000,0X8001,0X00C9,0X8001, 0X0192,0X8002,0X025B,0X8006,0X0324,0X800A,0X03ED,0X800F,0X04B6,0X8016,0X057F,0X801E,0X0648,0X8027,0X0711,0X8032,0X07D9,0X803E,0X08A2,0X804B, 0X096B,0X8059,0X0A33,0X8068,0X0AFB,0X8079,0X0BC4,0X808B,0X0C8C,0X809E,0X0D54,0X80B2,0X0E1C,0X80C8,0X0EE4,0X80DE,0X0FAB,0X80F6,0X1073,0X8110, 0X113A,0X812A,0X1201,0X8146,0X12C8,0X8163,0X138F,0X8181,0X1455,0X81A0,0X151C,0X81C1,0X15E2,0X81E2,0X16A8,0X8205,0X176E,0X822A,0X1833,0X824F, 0X18F9,0X8276,0X19BE,0X829D,0X1A83,0X82C6,0X1B47,0X82F1,0X1C0C,0X831C,0X1CD0,0X8349,0X1D93,0X8377,0X1E57,0X83A6,0X1F1A,0X83D6,0X1FDD,0X8407, 0X209F,0X843A,0X2162,0X846E,0X2224,0X84A3,0X22E5,0X84D9,0X23A7,0X8511,0X2467,0X8549,0X2528,0X8583,0X25E8,0X85BE,0X26A8,0X85FA,0X2768,0X8637, 0X2827,0X8676,0X28E5,0X86B6,0X29A4,0X86F6,0X2A62,0X8738,0X2B1F,0X877B,0X2BDC,0X87C0,0X2C99,0X8805,0X2D55,0X884C,0X2E11,0X8894,0X2ECC,0X88DD, 0X2F87,0X8927,0X3042,0X8972,0X30FC,0X89BE,0X31B5,0X8A0C,0X326E,0X8A5A,0X3327,0X8AAA,0X33DF,0X8AFB,0X3497,0X8B4D,0X354E,0X8BA0,0X3604,0X8BF5, 0X36BA,0X8C4A,0X3770,0X8CA1,0X3825,0X8CF8,0X38D9,0X8D51,0X398D,0X8DAB,0X3A40,0X8E06,0X3AF3,0X8E62,0X3BA5,0X8EBF,0X3C57,0X8F1D,0X3D08,0X8F7D, 0X3DB8,0X8FDD,0X3E68,0X903E,0X3F17,0X90A1,0X3FC6,0X9105,0X4074,0X9169,0X4121,0X91CF,0X41CE,0X9236,0X427A,0X929E,0X4326,0X9307,0X43D1,0X9371, 0X447B,0X93DC,0X4524,0X9448,0X45CD,0X94B5,0X4675,0X9523,0X471D,0X9592,0X47C4,0X9603,0X486A,0X9674,0X490F,0X96E6,0X49B4,0X9759,0X4A58,0X97CE, 0X4AFB,0X9843,0X4B9E,0X98B9,0X4C40,0X9930,0X4CE1,0X99A9,0X4D81,0X9A22,0X4E21,0X9A9C,0X4EC0,0X9B17,0X4F5E,0X9B94,0X4FFB,0X9C11,0X5098,0X9C8F, 
0X5134,0X9D0E,0X51CF,0X9D8E,0X5269,0X9E0F,0X5303,0X9E91,0X539B,0X9F14,0X5433,0X9F98,0X54CA,0XA01C,0X5560,0XA0A2,0X55F6,0XA129,0X568A,0XA1B0, 0X571E,0XA238,0X57B1,0XA2C2,0X5843,0XA34C,0X58D4,0XA3D7,0X5964,0XA463,0X59F4,0XA4F0,0X5A82,0XA57E,0X5B10,0XA60C,0X5B9D,0XA69C,0X5C29,0XA72C, 0X5CB4,0XA7BD,0X5D3E,0XA84F,0X5DC8,0XA8E2,0X5E50,0XA976,0X5ED7,0XAA0A,0X5F5E,0XAAA0,0X5FE4,0XAB36,0X6068,0XABCD,0X60EC,0XAC65,0X616F,0XACFD, 0X61F1,0XAD97,0X6272,0XAE31,0X62F2,0XAECC,0X6371,0XAF68,0X63EF,0XB005,0X646C,0XB0A2,0X64E9,0XB140,0X6564,0XB1DF,0X65DE,0XB27F,0X6657,0XB31F, 0X66D0,0XB3C0,0X6747,0XB462,0X67BD,0XB505,0X6832,0XB5A8,0X68A7,0XB64C,0X691A,0XB6F1,0X698C,0XB796,0X69FD,0XB83C,0X6A6E,0XB8E3,0X6ADD,0XB98B, 0X6B4B,0XBA33,0X6BB8,0XBADC,0X6C24,0XBB85,0X6C8F,0XBC2F,0X6CF9,0XBCDA,0X6D62,0XBD86,0X6DCA,0XBE32,0X6E31,0XBEDF,0X6E97,0XBF8C,0X6EFB,0XC03A, 0X6F5F,0XC0E9,0X6FC2,0XC198,0X7023,0XC248,0X7083,0XC2F8,0X70E3,0XC3A9,0X7141,0XC45B,0X719E,0XC50D,0X71FA,0XC5C0,0X7255,0XC673,0X72AF,0XC727, 0X7308,0XC7DB,0X735F,0XC890,0X73B6,0XC946,0X740B,0XC9FC,0X7460,0XCAB2,0X74B3,0XCB69,0X7505,0XCC21,0X7556,0XCCD9,0X75A6,0XCD92,0X75F4,0XCE4B, 0X7642,0XCF04,0X768E,0XCFBE,0X76D9,0XD079,0X7723,0XD134,0X776C,0XD1EF,0X77B4,0XD2AB,0X77FB,0XD367,0X7840,0XD424,0X7885,0XD4E1,0X78C8,0XD59E, 0X790A,0XD65C,0X794A,0XD71B,0X798A,0XD7D9,0X79C9,0XD898,0X7A06,0XD958,0X7A42,0XDA18,0X7A7D,0XDAD8,0X7AB7,0XDB99,0X7AEF,0XDC59,0X7B27,0XDD1B, 0X7B5D,0XDDDC,0X7B92,0XDE9E,0X7BC6,0XDF61,0X7BF9,0XE023,0X7C2A,0XE0E6,0X7C5A,0XE1A9,0X7C89,0XE26D,0X7CB7,0XE330,0X7CE4,0XE3F4,0X7D0F,0XE4B9, 0X7D3A,0XE57D,0X7D63,0XE642,0X7D8A,0XE707,0X7DB1,0XE7CD,0X7DD6,0XE892,0X7DFB,0XE958,0X7E1E,0XEA1E,0X7E3F,0XEAE4,0X7E60,0XEBAB,0X7E7F,0XEC71, 0X7E9D,0XED38,0X7EBA,0XEDFF,0X7ED6,0XEEC6,0X7EF0,0XEF8D,0X7F0A,0XF055,0X7F22,0XF11C,0X7F38,0XF1E4,0X7F4E,0XF2AC,0X7F62,0XF374,0X7F75,0XF43C, 0X7F87,0XF505,0X7F98,0XF5CD,0X7FA7,0XF695,0X7FB5,0XF75E,0X7FC2,0XF827,0X7FCE,0XF8EF,0X7FD9,0XF9B8,0X7FE2,0XFA81,0X7FEA,0XFB4A,0X7FF1,0XFC13, 
0X7FF6,0XFCDC,0X7FFA,0XFDA5,0X7FFE,0XFE6E,0X7FFF,0XFF37}; #elif defined(MAX_FFT_SIZE_512) const INT16 radix4FftTwiddleArr[2*MAX_FFT_SIZE] = {0X7FFF,0x0000,0x7FFE,0x0192,0x7FF6,0x0324,0x7FEA,0x04B6,0x7FD9,0x0648,0x7FC2,0x07D9,0x7FA7,0x096B,0x7F87,0x0AFB,0x7F62,0x0C8C,0x7F38,0x0E1C,0x7F0A,0x0FAB,0x7ED6,0x113A,0x7E9D,0x12C8,0x7E60,0x1455,0x7E1E,0x15E2,0x7DD6,0x176E,0x7D8A,0x18F9,0x7D3A,0x1A83,0x7CE4,0x1C0C,0x7C89,0x1D93,0x7C2A,0x1F1A,0x7BC6,0x209F,0x7B5D,0x2224,0x7AEF,0x23A7,0x7A7D,0x2528,0x7A06,0x26A8,0x798A,0x2827,0x790A,0x29A4,0x7885,0x2B1F,0x77FB,0x2C99,0x776C,0x2E11,0x76D9,0x2F87,0x7642,0x30FC,0x75A6,0x326E,0x7505,0x33DF,0x7460,0x354E,0x73B6,0x36BA,0x7308,0x3825,0x7255,0x398D,0x719E,0x3AF3,0x70E3,0x3C57,0x7023,0x3DB8,0x6F5F,0x3F17,0x6E97,0x4074,0x6DCA,0x41CE,0x6CF9,0x4326,0x6C24,0x447B,0x6B4B,0x45CD,0x6A6E,0x471D,0x698C,0x486A,0x68A7,0x49B4,0x67BD,0x4AFB,0x66D0,0x4C40,0x65DE,0x4D81,0x64E9,0x4EC0,0x63EF,0x4FFB,0x62F2,0x5134,0x61F1,0x5269,0x60EC,0x539B,0x5FE4,0x54CA,0x5ED7,0x55F6,0x5DC8,0x571E,0x5CB4,0x5843,0x5B9D,0x5964,0x5A82,0x5A82,0x5964,0x5B9D,0x5843,0x5CB4,0x571E,0x5DC8,0x55F6,0x5ED7,0x54CA,0x5FE4,0x539B,0x60EC,0x5269,0x61F1,0x5134,0x62F2,0x4FFB,0x63EF,0x4EC0,0x64E9,0x4D81,0x65DE,0x4C40,0x66D0,0x4AFB,0x67BD,0x49B4,0x68A7,0x486A,0x698C,0x471D,0x6A6E,0x45CD,0x6B4B,0x447B,0x6C24,0x4326,0x6CF9,0x41CE,0x6DCA,0x4074,0x6E97,0x3F17,0x6F5F,0x3DB8,0x7023,0x3C57,0x70E3,0x3AF3,0x719E,0x398D,0x7255,0x3825,0x7308,0x36BA,0x73B6,0x354E,0x7460,0x33DF,0x7505,0x326E,0x75A6,0x30FC,0x7642,0x2F87,0x76D9,0x2E11,0x776C,0x2C99,0x77FB,0x2B1F,0x7885,0x29A4,0x790A,0x2827,0x798A,0x26A8,0x7A06,0x2528,0x7A7D,0x23A7,0x7AEF,0x2224,0x7B5D,0x209F,0x7BC6,0x1F1A,0x7C2A,0x1D93,0x7C89,0x1C0C,0x7CE4,0x1A83,0x7D3A,0x18F9,0x7D8A,0x176E,0x7DD6,0x15E2,0x7E1E,0x1455,0x7E60,0x12C8,0x7E9D,0x113A,0x7ED6,0x0FAB,0x7F0A,0x0E1C,0x7F38,0x0C8C,0x7F62,0x0AFB,0x7F87,0x096B,0x7FA7,0x07D9,0x7FC2,0x0648,0x7FD9,0x04B6,0x7FEA,0x0324,0x7FF6,0x0192,0x7FFE,0x0000,0x7FFF,0xFE6E,0x7FFE,0xFCDC,0x7FF6,0xFB4A,0x7FEA,0xF9B8,0x7F
D9,0xF827,0x7FC2,0xF695,0x7FA7,0xF505,0x7F87,0xF374,0x7F62,0xF1E4,0x7F38,0xF055,0x7F0A,0xEEC6,0x7ED6,0xED38,0x7E9D,0xEBAB,0x7E60,0xEA1E,0x7E1E,0xE892,0x7DD6,0xE707,0x7D8A,0xE57D,0x7D3A,0xE3F4,0x7CE4,0xE26D,0x7C89,0xE0E6,0x7C2A,0xDF61,0x7BC6,0xDDDC,0x7B5D,0xDC59,0x7AEF,0xDAD8,0x7A7D,0xD958,0x7A06,0xD7D9,0x798A,0xD65C,0x790A,0xD4E1,0x7885,0xD367,0x77FB,0xD1EF,0x776C,0xD079,0x76D9,0xCF04,0x7642,0xCD92,0x75A6,0xCC21,0x7505,0xCAB2,0x7460,0xC946,0x73B6,0xC7DB,0x7308,0xC673,0x7255,0xC50D,0x719E,0xC3A9,0x70E3,0xC248,0x7023,0xC0E9,0x6F5F,0xBF8C,0x6E97,0xBE32,0x6DCA,0xBCDA,0x6CF9,0xBB85,0x6C24,0xBA33,0x6B4B,0xB8E3,0x6A6E,0xB796,0x698C,0xB64C,0x68A7,0xB505,0x67BD,0xB3C0,0x66D0,0xB27F,0x65DE,0xB140,0x64E9,0xB005,0x63EF,0xAECC,0x62F2,0xAD97,0x61F1,0xAC65,0x60EC,0xAB36,0x5FE4,0xAA0A,0x5ED7,0xA8E2,0x5DC8,0xA7BD,0x5CB4,0xA69C,0x5B9D,0xA57E,0x5A82,0xA463,0x5964,0xA34C,0x5843,0xA238,0x571E,0xA129,0x55F6,0xA01C,0x54CA,0x9F14,0x539B,0x9E0F,0x5269,0x9D0E,0x5134,0x9C11,0x4FFB,0x9B17,0x4EC0,0x9A22,0x4D81,0x9930,0x4C40,0x9843,0x4AFB,0x9759,0x49B4,0x9674,0x486A,0x9592,0x471D,0x94B5,0x45CD,0x93DC,0x447B,0x9307,0x4326,0x9236,0x41CE,0x9169,0x4074,0x90A1,0x3F17,0x8FDD,0x3DB8,0x8F1D,0x3C57,0x8E62,0x3AF3,0x8DAB,0x398D,0x8CF8,0x3825,0x8C4A,0x36BA,0x8BA0,0x354E,0x8AFB,0x33DF,0x8A5A,0x326E,0x89BE,0x30FC,0x8927,0x2F87,0x8894,0x2E11,0x8805,0x2C99,0x877B,0x2B1F,0x86F6,0x29A4,0x8676,0x2827,0x85FA,0x26A8,0x8583,0x2528,0x8511,0x23A7,0x84A3,0x2224,0x843A,0x209F,0x83D6,0x1F1A,0x8377,0x1D93,0x831C,0x1C0C,0x82C6,0x1A83,0x8276,0x18F9,0x822A,0x176E,0x81E2,0x15E2,0x81A0,0x1455,0x8163,0x12C8,0x812A,0x113A,0x80F6,0x0FAB,0x80C8,0x0E1C,0x809E,0x0C8C,0x8079,0x0AFB,0x8059,0x096B,0x803E,0x07D9,0x8027,0x0648,0x8016,0x04B6,0x800A,0x0324,0x8002,0x0192,0x8000,0x0000,0x8002,0xFE6E,0x800A,0xFCDC,0x8016,0xFB4A,0x8027,0xF9B8,0x803E,0xF827,0x8059,0xF695,0x8079,0xF505,0x809E,0xF374,0x80C8,0xF1E4,0x80F6,0xF055,0x812A,0xEEC6,0x8163,0xED38,0x81A0,0xEBAB,0x81E2,0xEA1E,0x822A,0xE892,0x8276,0xE707,0x82C6,0xE57D,0x831C,0xE3F4,0x8377,0x
E26D,0x83D6,0xE0E6,0x843A,0xDF61,0x84A3,0xDDDC,0x8511,0xDC59,0x8583,0xDAD8,0x85FA,0xD958,0x8676,0xD7D9,0x86F6,0xD65C,0x877B,0xD4E1,0x8805,0xD367,0x8894,0xD1EF,0x8927,0xD079,0x89BE,0xCF04,0x8A5A,0xCD92,0x8AFB,0xCC21,0x8BA0,0xCAB2,0x8C4A,0xC946,0x8CF8,0xC7DB,0x8DAB,0xC673,0x8E62,0xC50D,0x8F1D,0xC3A9,0x8FDD,0xC248,0x90A1,0xC0E9,0x9169,0xBF8C,0x9236,0xBE32,0x9307,0xBCDA,0x93DC,0xBB85,0x94B5,0xBA33,0x9592,0xB8E3,0x9674,0xB796,0x9759,0xB64C,0x9843,0xB505,0x9930,0xB3C0,0x9A22,0xB27F,0x9B17,0xB140,0x9C11,0xB005,0x9D0E,0xAECC,0x9E0F,0xAD97,0x9F14,0xAC65,0xA01C,0xAB36,0xA129,0xAA0A,0xA238,0xA8E2,0xA34C,0xA7BD,0xA463,0xA69C,0xA57E,0xA57E,0xA69C,0xA463,0xA7BD,0xA34C,0xA8E2,0xA238,0xAA0A,0xA129,0xAB36,0xA01C,0xAC65,0x9F14,0xAD97,0x9E0F,0xAECC,0x9D0E,0xB005,0x9C11,0xB140,0x9B17,0xB27F,0x9A22,0xB3C0,0x9930,0xB505,0x9843,0xB64C,0x9759,0xB796,0x9674,0xB8E3,0x9592,0xBA33,0x94B5,0xBB85,0x93DC,0xBCDA,0x9307,0xBE32,0x9236,0xBF8C,0x9169,0xC0E9,0x90A1,0xC248,0x8FDD,0xC3A9,0x8F1D,0xC50D,0x8E62,0xC673,0x8DAB,0xC7DB,0x8CF8,0xC946,0x8C4A,0xCAB2,0x8BA0,0xCC21,0x8AFB,0xCD92,0x8A5A,0xCF04,0x89BE,0xD079,0x8927,0xD1EF,0x8894,0xD367,0x8805,0xD4E1,0x877B,0xD65C,0x86F6,0xD7D9,0x8676,0xD958,0x85FA,0xDAD8,0x8583,0xDC59,0x8511,0xDDDC,0x84A3,0xDF61,0x843A,0xE0E6,0x83D6,0xE26D,0x8377,0xE3F4,0x831C,0xE57D,0x82C6,0xE707,0x8276,0xE892,0x822A,0xEA1E,0x81E2,0xEBAB,0x81A0,0xED38,0x8163,0xEEC6,0x812A,0xF055,0x80F6,0xF1E4,0x80C8,0xF374,0x809E,0xF505,0x8079,0xF695,0x8059,0xF827,0x803E,0xF9B8,0x8027,0xFB4A,0x8016,0xFCDC,0x800A,0xFE6E,0x8002,0x0000,0x8000,0x0192,0x8002,0x0324,0x800A,0x04B6,0x8016,0x0648,0x8027,0x07D9,0x803E,0x096B,0x8059,0x0AFB,0x8079,0x0C8C,0x809E,0x0E1C,0x80C8,0x0FAB,0x80F6,0x113A,0x812A,0x12C8,0x8163,0x1455,0x81A0,0x15E2,0x81E2,0x176E,0x822A,0x18F9,0x8276,0x1A83,0x82C6,0x1C0C,0x831C,0x1D93,0x8377,0x1F1A,0x83D6,0x209F,0x843A,0x2224,0x84A3,0x23A7,0x8511,0x2528,0x8583,0x26A8,0x85FA,0x2827,0x8676,0x29A4,0x86F6,0x2B1F,0x877B,0x2C99,0x8805,0x2E11,0x8894,0x2F87,0x8927,0x30FC,0x89BE,0x326E,0x8A5A,0x33DF,
0x8AFB,0x354E,0x8BA0,0x36BA,0x8C4A,0x3825,0x8CF8,0x398D,0x8DAB,0x3AF3,0x8E62,0x3C57,0x8F1D,0x3DB8,0x8FDD,0x3F17,0x90A1,0x4074,0x9169,0x41CE,0x9236,0x4326,0x9307,0x447B,0x93DC,0x45CD,0x94B5,0x471D,0x9592,0x486A,0x9674,0x49B4,0x9759,0x4AFB,0x9843,0x4C40,0x9930,0x4D81,0x9A22,0x4EC0,0x9B17,0x4FFB,0x9C11,0x5134,0x9D0E,0x5269,0x9E0F,0x539B,0x9F14,0x54CA,0xA01C,0x55F6,0xA129,0x571E,0xA238,0x5843,0xA34C,0x5964,0xA463,0x5A82,0xA57E,0x5B9D,0xA69C,0x5CB4,0xA7BD,0x5DC8,0xA8E2,0x5ED7,0xAA0A,0x5FE4,0xAB36,0x60EC,0xAC65,0x61F1,0xAD97,0x62F2,0xAECC,0x63EF,0xB005,0x64E9,0xB140,0x65DE,0xB27F,0x66D0,0xB3C0,0x67BD,0xB505,0x68A7,0xB64C,0x698C,0xB796,0x6A6E,0xB8E3,0x6B4B,0xBA33,0x6C24,0xBB85,0x6CF9,0xBCDA,0x6DCA,0xBE32,0x6E97,0xBF8C,0x6F5F,0xC0E9,0x7023,0xC248,0x70E3,0xC3A9,0x719E,0xC50D,0x7255,0xC673,0x7308,0xC7DB,0x73B6,0xC946,0x7460,0xCAB2,0x7505,0xCC21,0x75A6,0xCD92,0x7642,0xCF04,0x76D9,0xD079,0x776C,0xD1EF,0x77FB,0xD367,0x7885,0xD4E1,0x790A,0xD65C,0x798A,0xD7D9,0x7A06,0xD958,0x7A7D,0xDAD8,0x7AEF,0xDC59,0x7B5D,0xDDDC,0x7BC6,0xDF61,0x7C2A,0xE0E6,0x7C89,0xE26D,0x7CE4,0xE3F4,0x7D3A,0xE57D,0x7D8A,0xE707,0x7DD6,0xE892,0x7E1E,0xEA1E,0x7E60,0xEBAB,0x7E9D,0xED38,0x7ED6,0xEEC6,0x7F0A,0xF055,0x7F38,0xF1E4,0x7F62,0xF374,0x7F87,0xF505,0x7FA7,0xF695,0x7FC2,0xF827,0x7FD9,0xF9B8,0x7FEA,0xFB4A,0x7FF6,0xFCDC,0x7FFE,0xFE6E}; #elif defined(MAX_FFT_SIZE_256) #ifdef TWIDDLE_HALF_SIZE const INT16 radix4FftTwiddleArr[MAX_FFT_SIZE] = 
{0X7FFF,0x0000,0x7FF6,0x0324,0x7FD9,0x0648,0x7FA7,0x096B,0x7F62,0x0C8C,0x7F0A,0x0FAB,0x7E9D,0x12C8,0x7E1E,0x15E2,0x7D8A,0x18F9,0x7CE4,0x1C0C,0x7C2A,0x1F1A,0x7B5D,0x2224,0x7A7D,0x2528,0x798A,0x2827,0x7885,0x2B1F,0x776C,0x2E11,0x7642,0x30FC,0x7505,0x33DF,0x73B6,0x36BA,0x7255,0x398D,0x70E3,0x3C57,0x6F5F,0x3F17,0x6DCA,0x41CE,0x6C24,0x447B,0x6A6E,0x471D,0x68A7,0x49B4,0x66D0,0x4C40,0x64E9,0x4EC0,0x62F2,0x5134,0x60EC,0x539B,0x5ED7,0x55F6,0x5CB4,0x5843,0x5A82,0x5A82,0x5843,0x5CB4,0x55F6,0x5ED7,0x539B,0x60EC,0x5134,0x62F2,0x4EC0,0x64E9,0x4C40,0x66D0,0x49B4,0x68A7,0x471D,0x6A6E,0x447B,0x6C24,0x41CE,0x6DCA,0x3F17,0x6F5F,0x3C57,0x70E3,0x398D,0x7255,0x36BA,0x73B6,0x33DF,0x7505,0x30FC,0x7642,0x2E11,0x776C,0x2B1F,0x7885,0x2827,0x798A,0x2528,0x7A7D,0x2224,0x7B5D,0x1F1A,0x7C2A,0x1C0C,0x7CE4,0x18F9,0x7D8A,0x15E2,0x7E1E,0x12C8,0x7E9D,0x0FAB,0x7F0A,0x0C8C,0x7F62,0x096B,0x7FA7,0x0648,0x7FD9,0x0324,0x7FF6,0x0000,0x7FFF,0xFCDC,0x7FF6,0xF9B8,0x7FD9,0xF695,0x7FA7,0xF374,0x7F62,0xF055,0x7F0A,0xED38,0x7E9D,0xEA1E,0x7E1E,0xE707,0x7D8A,0xE3F4,0x7CE4,0xE0E6,0x7C2A,0xDDDC,0x7B5D,0xDAD8,0x7A7D,0xD7D9,0x798A,0xD4E1,0x7885,0xD1EF,0x776C,0xCF04,0x7642,0xCC21,0x7505,0xC946,0x73B6,0xC673,0x7255,0xC3A9,0x70E3,0xC0E9,0x6F5F,0xBE32,0x6DCA,0xBB85,0x6C24,0xB8E3,0x6A6E,0xB64C,0x68A7,0xB3C0,0x66D0,0xB140,0x64E9,0xAECC,0x62F2,0xAC65,0x60EC,0xAA0A,0x5ED7,0xA7BD,0x5CB4,0xA57E,0x5A82,0xA34C,0x5843,0xA129,0x55F6,0x9F14,0x539B,0x9D0E,0x5134,0x9B17,0x4EC0,0x9930,0x4C40,0x9759,0x49B4,0x9592,0x471D,0x93DC,0x447B,0x9236,0x41CE,0x90A1,0x3F17,0x8F1D,0x3C57,0x8DAB,0x398D,0x8C4A,0x36BA,0x8AFB,0x33DF,0x89BE,0x30FC,0x8894,0x2E11,0x877B,0x2B1F,0x8676,0x2827,0x8583,0x2528,0x84A3,0x2224,0x83D6,0x1F1A,0x831C,0x1C0C,0x8276,0x18F9,0x81E2,0x15E2,0x8163,0x12C8,0x80F6,0x0FAB,0x809E,0x0C8C,0x8059,0x096B,0x8027,0x0648,0x800A,0x0324}; #else const INT16 radix4FftTwiddleArr[2*MAX_FFT_SIZE] = 
{0X7FFF,0x0000,0x7FF6,0x0324,0x7FD9,0x0648,0x7FA7,0x096B,0x7F62,0x0C8C,0x7F0A,0x0FAB,0x7E9D,0x12C8,0x7E1E,0x15E2,0x7D8A,0x18F9,0x7CE4,0x1C0C,0x7C2A,0x1F1A,0x7B5D,0x2224,0x7A7D,0x2528,0x798A,0x2827,0x7885,0x2B1F,0x776C,0x2E11,0x7642,0x30FC,0x7505,0x33DF,0x73B6,0x36BA,0x7255,0x398D,0x70E3,0x3C57,0x6F5F,0x3F17,0x6DCA,0x41CE,0x6C24,0x447B,0x6A6E,0x471D,0x68A7,0x49B4,0x66D0,0x4C40,0x64E9,0x4EC0,0x62F2,0x5134,0x60EC,0x539B,0x5ED7,0x55F6,0x5CB4,0x5843,0x5A82,0x5A82,0x5843,0x5CB4,0x55F6,0x5ED7,0x539B,0x60EC,0x5134,0x62F2,0x4EC0,0x64E9,0x4C40,0x66D0,0x49B4,0x68A7,0x471D,0x6A6E,0x447B,0x6C24,0x41CE,0x6DCA,0x3F17,0x6F5F,0x3C57,0x70E3,0x398D,0x7255,0x36BA,0x73B6,0x33DF,0x7505,0x30FC,0x7642,0x2E11,0x776C,0x2B1F,0x7885,0x2827,0x798A,0x2528,0x7A7D,0x2224,0x7B5D,0x1F1A,0x7C2A,0x1C0C,0x7CE4,0x18F9,0x7D8A,0x15E2,0x7E1E,0x12C8,0x7E9D,0x0FAB,0x7F0A,0x0C8C,0x7F62,0x096B,0x7FA7,0x0648,0x7FD9,0x0324,0x7FF6,0x0000,0x7FFF,0xFCDC,0x7FF6,0xF9B8,0x7FD9,0xF695,0x7FA7,0xF374,0x7F62,0xF055,0x7F0A,0xED38,0x7E9D,0xEA1E,0x7E1E,0xE707,0x7D8A,0xE3F4,0x7CE4,0xE0E6,0x7C2A,0xDDDC,0x7B5D,0xDAD8,0x7A7D,0xD7D9,0x798A,0xD4E1,0x7885,0xD1EF,0x776C,0xCF04,0x7642,0xCC21,0x7505,0xC946,0x73B6,0xC673,0x7255,0xC3A9,0x70E3,0xC0E9,0x6F5F,0xBE32,0x6DCA,0xBB85,0x6C24,0xB8E3,0x6A6E,0xB64C,0x68A7,0xB3C0,0x66D0,0xB140,0x64E9,0xAECC,0x62F2,0xAC65,0x60EC,0xAA0A,0x5ED7,0xA7BD,0x5CB4,0xA57E,0x5A82,0xA34C,0x5843,0xA129,0x55F6,0x9F14,0x539B,0x9D0E,0x5134,0x9B17,0x4EC0,0x9930,0x4C40,0x9759,0x49B4,0x9592,0x471D,0x93DC,0x447B,0x9236,0x41CE,0x90A1,0x3F17,0x8F1D,0x3C57,0x8DAB,0x398D,0x8C4A,0x36BA,0x8AFB,0x33DF,0x89BE,0x30FC,0x8894,0x2E11,0x877B,0x2B1F,0x8676,0x2827,0x8583,0x2528,0x84A3,0x2224,0x83D6,0x1F1A,0x831C,0x1C0C,0x8276,0x18F9,0x81E2,0x15E2,0x8163,0x12C8,0x80F6,0x0FAB,0x809E,0x0C8C,0x8059,0x096B,0x8027,0x0648,0x800A,0x0324,0x8000,0x0000,0x800A,0xFCDC,0x8027,0xF9B8,0x8059,0xF695,0x809E,0xF374,0x80F6,0xF055,0x8163,0xED38,0x81E2,0xEA1E,0x8276,0xE707,0x831C,0xE3F4,0x83D6,0xE0E6,0x84A3,0xDDDC,0x8583,0xDAD8,0x8676,0xD7D9,0x877B,0xD4
E1,0x8894,0xD1EF,0x89BE,0xCF04,0x8AFB,0xCC21,0x8C4A,0xC946,0x8DAB,0xC673,0x8F1D,0xC3A9,0x90A1,0xC0E9,0x9236,0xBE32,0x93DC,0xBB85,0x9592,0xB8E3,0x9759,0xB64C,0x9930,0xB3C0,0x9B17,0xB140,0x9D0E,0xAECC,0x9F14,0xAC65,0xA129,0xAA0A,0xA34C,0xA7BD,0xA57E,0xA57E,0xA7BD,0xA34C,0xAA0A,0xA129,0xAC65,0x9F14,0xAECC,0x9D0E,0xB140,0x9B17,0xB3C0,0x9930,0xB64C,0x9759,0xB8E3,0x9592,0xBB85,0x93DC,0xBE32,0x9236,0xC0E9,0x90A1,0xC3A9,0x8F1D,0xC673,0x8DAB,0xC946,0x8C4A,0xCC21,0x8AFB,0xCF04,0x89BE,0xD1EF,0x8894,0xD4E1,0x877B,0xD7D9,0x8676,0xDAD8,0x8583,0xDDDC,0x84A3,0xE0E6,0x83D6,0xE3F4,0x831C,0xE707,0x8276,0xEA1E,0x81E2,0xED38,0x8163,0xF055,0x80F6,0xF374,0x809E,0xF695,0x8059,0xF9B8,0x8027,0xFCDC,0x800A,0x0000,0x8000,0x0324,0x800A,0x0648,0x8027,0x096B,0x8059,0x0C8C,0x809E,0x0FAB,0x80F6,0x12C8,0x8163,0x15E2,0x81E2,0x18F9,0x8276,0x1C0C,0x831C,0x1F1A,0x83D6,0x2224,0x84A3,0x2528,0x8583,0x2827,0x8676,0x2B1F,0x877B,0x2E11,0x8894,0x30FC,0x89BE,0x33DF,0x8AFB,0x36BA,0x8C4A,0x398D,0x8DAB,0x3C57,0x8F1D,0x3F17,0x90A1,0x41CE,0x9236,0x447B,0x93DC,0x471D,0x9592,0x49B4,0x9759,0x4C40,0x9930,0x4EC0,0x9B17,0x5134,0x9D0E,0x539B,0x9F14,0x55F6,0xA129,0x5843,0xA34C,0x5A82,0xA57E,0x5CB4,0xA7BD,0x5ED7,0xAA0A,0x60EC,0xAC65,0x62F2,0xAECC,0x64E9,0xB140,0x66D0,0xB3C0,0x68A7,0xB64C,0x6A6E,0xB8E3,0x6C24,0xBB85,0x6DCA,0xBE32,0x6F5F,0xC0E9,0x70E3,0xC3A9,0x7255,0xC673,0x73B6,0xC946,0x7505,0xCC21,0x7642,0xCF04,0x776C,0xD1EF,0x7885,0xD4E1,0x798A,0xD7D9,0x7A7D,0xDAD8,0x7B5D,0xDDDC,0x7C2A,0xE0E6,0x7CE4,0xE3F4,0x7D8A,0xE707,0x7E1E,0xEA1E,0x7E9D,0xED38,0x7F0A,0xF055,0x7F62,0xF374,0x7FA7,0xF695,0x7FD9,0xF9B8,0x7FF6,0xFCDC}; #endif #endif #ifdef DOUBLE #include #define PI 3.141592653589793 #endif #if !(defined(ARM_DS5) || defined(W8987)) int __clz(int x){ #ifdef ARM_GCC int ret; asm ("clz %0, %1 ; \n" : "=r"(ret) :"r"(x)); return ret; #else int ret = 0; while(!(x&0x80000000) && (ret<32)){ x<<=1; ret++; } return ret; #endif } #endif #if defined(ARM_GCC) || defined(ARM_DS5) #define REV_IDX(x) reverse(x) #define IDX_LEN 32 unsigned 
int reverse(register unsigned int x){ int ret; #ifdef ARM_DS5 __asm ("rbit ret, x"); #else asm ("rbit %0, %1 ; \n" : "=r"(ret) :"r"(x)); #endif return ret; } #else #if (MAX_FFT_SIZE > (1<<8)) #define REV_IDX(x) reverse(x) #define IDX_LEN 32 #else #define REV_IDX(x) reverse8(x) #define IDX_LEN 8 #endif unsigned int reverse(register unsigned int x){ x = (((x & 0xaaaaaaaa) >> 1) | ((x & 0x55555555) << 1)); x = (((x & 0xcccccccc) >> 2) | ((x & 0x33333333) << 2)); x = (((x & 0xf0f0f0f0) >> 4) | ((x & 0x0f0f0f0f) << 4)); x = (((x & 0xff00ff00) >> 8) | ((x & 0x00ff00ff) << 8)); return((x >> 16) | (x << 16)); } unsigned char reverse8(unsigned char b){ #ifdef _MSC_VER // 64 bit b = (unsigned char) ((b * 0x0202020202ULL & 0x010884422010ULL) % 1023); #else // 32 bit b = ((b * 0x0802LU & 0x22110LU) | (b * 0x8020LU & 0x88440LU)) * 0x10101LU >> 16; #endif return b; } #endif #if !(defined(ARM_GCC) || defined(ARM_DS5)) static const char my_msb_lut[32] = {0, 1, 28, 2, 29, 14, 24, 3, 30, 22, 20, 15, 25, 17, 4, 8, 31, 27, 13, 23, 21, 19, 16, 7, 26, 12, 18, 6, 11, 5, 10, 9}; int myMsb(unsigned int v){ v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16; v = (v >> 1) + 1; return my_msb_lut[((unsigned int)(v * 0x077CB531UL)) >> 27]; } #ifdef ARM_DEBUG #define ROUND_CONST15BIT 0x4000 #define MAX_POS16BIT 0x7fff #define MIN_NEQ16BIT -0x7fff #define MAX_POS32BIT 0x7fffffff #define MIN_NEQ32BIT -0x7fffffff INT32 radix4Fft16BitTo32BitMultiplier(INT16 x, INT16 y){ INT32 temp = x; temp *=y; if(temp>MAX_POS32BIT) return MAX_POS32BIT; else{ if(tempMAX_POS32BIT) return MAX_POS32BIT; else{ if(tempMAX_POS32BIT) return MAX_POS32BIT; else{ if(temp>15)&0x1); #endif temp >>=15; if(temp>MAX_POS16BIT) return MAX_POS16BIT; else{ if(temp>=(15+extraShift); if(temp>0xffff) return 0xffff; else{ if(temp<0) return 0; else return (unsigned short)temp; } } INT16 radix4Fft16BitAdd(INT16 x, INT16 y){ INT32 temp = x; temp +=y; if(temp>MAX_POS16BIT) return MAX_POS16BIT; else{ if(tempMAX_POS16BIT) return 
MAX_POS16BIT; else{ if(temp>1); } INT16 radix4Fft16BitSubtractShift(INT16 x, INT16 y){ INT32 temp = x; temp -=y; return (INT16)(temp>>1); } #endif #else #pragma arm #endif void radix4FftRadix4ButterflyTwiddle(INT16* pSrc, INT16* pDst, int stride, const INT16* pCoeff, int coeffStride){ #if defined(ARM_GCC) || defined(ARM_DS5) #ifdef ARM_DS5 int reg5, reg6, reg7, reg8, reg9, reg10, reg11; int strideDw = 4*stride; int coeffStrideDw = 4*coeffStride; __asm volatile { LDR reg11, [pCoeff, coeffStrideDw]! LDR reg5, [pSrc], strideDw LDR reg7, [pSrc], strideDw LDR reg6, [pSrc], strideDw SMUAD reg10, reg11, reg6 SMUSDX reg9, reg11, reg6 LDR reg11, [pCoeff, coeffStrideDw]! LSL reg9, reg9, #1 PKHTB reg6, reg9, reg10, ASR #15 SMUAD reg10, reg11, reg7 SMUSDX reg9, reg11, reg7 LDR reg8, [pSrc], strideDw LDR reg11, [pCoeff, coeffStrideDw] LSL reg9, reg9, #1 PKHTB reg7, reg9, reg10, ASR #15 SMUAD reg10, reg11, reg8 SMUSDX reg8, reg11, reg8 LSL reg8, reg8, #1 PKHTB reg9, reg8, reg10, ASR #15 QADD16 reg10, reg5, reg7 QSUB16 reg11, reg5, reg7 QADD16 reg8, reg6, reg9 QSUB16 reg9, reg6, reg9 QADD16 reg5, reg10, reg8 QSUB16 reg7, reg10, reg8 QSAX reg6, reg11, reg9 QASX reg8, reg11, reg9 STR reg5, [pDst], strideDw STR reg6, [pDst], strideDw STR reg7, [pDst], strideDw STR reg8, [pDst], strideDw } #else asm volatile( "LDR r11, [%[pCoeff], %[coeffStrideDw]]! \n\t" "LDR r5, [%[pSrc]], %[strideDw] \n\t" "LDR r7, [%[pSrc]], %[strideDw] \n\t" "LDR r6, [%[pSrc]], %[strideDw] \n\t" "SMUAD r10, r11, r6 \n\t" "SMUSDX r9, r11, r6 \n\t" "LDR r11, [%[pCoeff], %[coeffStrideDw]]! 
\n\t" "LSL r9, r9, #1 \n\t" "PKHTB r6, r9, r10, ASR #15 \n\t" "SMUAD r10, r11, r7 \n\t" "SMUSDX r9, r11, r7 \n\t" "LDR r8, [%[pSrc]], %[strideDw] \n\t" "LDR r11, [%[pCoeff], %[coeffStrideDw]] \n\t" "LSL r9, r9, #1 \n\t" "PKHTB r7, r9, r10, ASR #15 \n\t" "SMUAD r10, r11, r8 \n\t" "SMUSDX r8, r11, r8 \n\t" "LSL r8, r8, #1 \n\t" "PKHTB r9, r8, r10, ASR #15 \n\t" "QADD16 r10, r5, r7 \n\t" "QSUB16 r11, r5, r7 \n\t" "QADD16 r8, r6, r9 \n\t" "QSUB16 r9, r6, r9 \n\t" "QADD16 r5, r10, r8 \n\t" "QSUB16 r7, r10, r8 \n\t" "QSAX r6, r11, r9 \n\t" "QASX r8, r11, r9 \n\t" "STR r5, [%[pDst]], %[strideDw] \n\t" "STR r6, [%[pDst]], %[strideDw] \n\t" "STR r7, [%[pDst]], %[strideDw] \n\t" "STR r8, [%[pDst]], %[strideDw] \n\t" : [pSrc]"+r"(pSrc), [pDst]"+r"(pDst) : [pCoeff]"r"(pCoeff), [strideDw]"r"(4*stride), [coeffStrideDw]"r"(4*coeffStride) : "r5", "r6", "r7", "r8", "r9", "r10","r11"); /* "SMULBT r8, r6, r11 \n\t" "SMULTB r9, r6, r11 \n\t" "QSUB r9, r9, r8 \n\t" */ #endif #else INT16 x1I, x1Q, x2I, x2Q, x3I, x3Q; // x0I, x0Q, INT16 z0I, z0Q, z1I, z1Q, z2I, z2Q, z3I, z3Q; INT16 y0I, y0Q, y1I, y1Q, y2I, y2Q, y3I, y3Q; INT16 AI, AQ, BI, BQ, CI, CQ, DI, DQ; INT16 W1I, W1Q, W2I, W2Q, W3I, W3Q; // W0I, W0Q, unsigned int *loadPtr = (unsigned int *)pSrc; unsigned int loadTemp; unsigned int *coeffPtr = (unsigned int *)(pCoeff +2*coeffStride); unsigned int loadCoeff; unsigned int *storePtr = (unsigned int *)pDst; unsigned int storeTemp; // re-order due to L^4_2 - because of using radix-2 bit-reversal loadTemp = *loadPtr; loadPtr +=stride; z0I = (loadTemp&0xffff); z0Q = (loadTemp>>16); loadTemp = *loadPtr; loadPtr +=stride; x2I = (loadTemp&0xffff); x2Q = (loadTemp>>16); loadTemp = *loadPtr; loadPtr +=stride; x1I = (loadTemp&0xffff); x1Q = (loadTemp>>16); loadTemp = *loadPtr; loadPtr +=stride; x3I = (loadTemp&0xffff); x3Q = (loadTemp>>16); loadCoeff = *coeffPtr; coeffPtr +=coeffStride; W1I = (loadCoeff&0xffff); W1Q = (loadCoeff>>16); loadCoeff = *coeffPtr; coeffPtr +=coeffStride; W2I = 
(loadCoeff&0xffff); W2Q = (loadCoeff>>16); loadCoeff = *coeffPtr; coeffPtr +=coeffStride; W3I = (loadCoeff&0xffff); W3Q = (loadCoeff>>16); // z0 = W0*x0, z1 = W1*x1, z2 = W2*x2, z3 = W3*x3 assuming W0Q = -exp(1i*2*pi*nk/N) #ifdef ARM_DEBUG z1I = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulator( radix4Fft16BitTo32BitMultiplier(W1I, x1I), radix4Fft16BitTo32BitMultiplier(W1Q, x1Q))); // r6 z1Q = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulatorNeg( radix4Fft16BitTo32BitMultiplier(W1I, x1Q), radix4Fft16BitTo32BitMultiplier(W1Q, x1I))); z2I = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulator( radix4Fft16BitTo32BitMultiplier(W2I, x2I), radix4Fft16BitTo32BitMultiplier(W2Q, x2Q))); // r7 z2Q = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulatorNeg( radix4Fft16BitTo32BitMultiplier(W2I, x2Q), radix4Fft16BitTo32BitMultiplier(W2Q, x2I))); z3I = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulator( radix4Fft16BitTo32BitMultiplier(W3I, x3I), radix4Fft16BitTo32BitMultiplier(W3Q, x3Q))); // r9 z3Q = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulatorNeg( radix4Fft16BitTo32BitMultiplier(W3I, x3Q), radix4Fft16BitTo32BitMultiplier(W3Q, x3I))); #else z1I = (INT16)((((INT32)W1I*x1I)+((INT32)W1Q*x1Q))>>15); z1Q = (INT16)((((INT32)W1I*x1Q)-((INT32)W1Q*x1I))>>15); z2I = (INT16)((((INT32)W2I*x2I)+((INT32)W2Q*x2Q))>>15); z2Q = (INT16)((((INT32)W2I*x2Q)-((INT32)W2Q*x2I))>>15); z3I = (INT16)((((INT32)W3I*x3I)+((INT32)W3Q*x3Q))>>15); z3Q = (INT16)((((INT32)W3I*x3Q)-((INT32)W3Q*x3I))>>15); #endif // calculate using A = (z0+z2), B = (z0-z2), C = (z1+z3), D = (z1-z3); // y0 = z0 + z1 + z2 + z3 = A + C // y1 = z0 -jz1 - z2 +jz3 = B -jD // y2 = z0 - z1 + z2 - z3 = A - C // y3 = z0 +jz1 - z2 -jz3 = B +jD #ifdef ARM_DEBUG AI = radix4Fft16BitAdd(z0I, z2I); AQ = radix4Fft16BitAdd(z0Q, z2Q); BI = radix4Fft16BitSubtract(z0I, z2I); BQ = radix4Fft16BitSubtract(z0Q, z2Q); CI = radix4Fft16BitAdd(z1I, z3I); CQ = radix4Fft16BitAdd(z1Q, z3Q); DI = radix4Fft16BitSubtract(z1I, z3I); 
DQ = radix4Fft16BitSubtract(z1Q, z3Q); y0I = radix4Fft16BitAdd(AI, CI); y0Q = radix4Fft16BitAdd(AQ, CQ); y1I = radix4Fft16BitAdd(BI, DQ); y1Q = radix4Fft16BitSubtract(BQ, DI); y2I = radix4Fft16BitSubtract(AI, CI); y2Q = radix4Fft16BitSubtract(AQ, CQ); y3I = radix4Fft16BitSubtract(BI, DQ); y3Q = radix4Fft16BitAdd(BQ, DI); #else AI = z0I+z2I; AQ = z0Q+z2Q; BI = z0I-z2I; BQ = z0Q-z2Q; CI = z1I+z3I; CQ = z1Q+z3Q; DI = z1I-z3I; DQ = z1Q-z3Q; y0I = AI+CI; y0Q = AQ+CQ; y1I = BI+DQ; y1Q = BQ-DI; y2I = AI-CI; y2Q = AQ-CQ; y3I = BI-DQ; y3Q = BQ+DI; #endif storeTemp = (unsigned short)y0Q; storeTemp<<=16; storeTemp |=(unsigned short)y0I; *storePtr = storeTemp; storePtr+=stride; storeTemp = (unsigned short)y1Q; storeTemp<<=16; storeTemp |=(unsigned short)y1I; *storePtr = storeTemp; storePtr+=stride; storeTemp = (unsigned short)y2Q; storeTemp<<=16; storeTemp |=(unsigned short)y2I; *storePtr = storeTemp; storePtr+=stride; storeTemp = (unsigned short)y3Q; storeTemp<<=16; storeTemp |=(unsigned short)y3I; *storePtr = storeTemp; storePtr+=stride; #endif } void radix4IfftRadix4ButterflyTwiddle(INT16* pSrc, INT16* pDst, int stride, const INT16* pCoeff, int coeffStride){ #if defined(ARM_GCC) || defined(ARM_DS5) #ifdef ARM_DS5 int coeffStrideMult4 = 4*coeffStride; int strideMult4 = 4*stride; int reg5, reg6, reg7, reg8, reg9, reg10, reg11; __asm volatile { LDR reg11, [pCoeff, coeffStrideMult4]! LDR reg5, [pSrc], strideMult4 LDR reg7, [pSrc], strideMult4 LDR reg6, [pSrc], strideMult4 SMUSD reg10, reg11, reg6 SMUADX reg9, reg11, reg6 LDR reg11, [pCoeff, coeffStrideMult4]! 
LSL reg9, reg9, #1 PKHTB reg6, reg9, reg10, ASR #15 SMUSD reg10, reg11, reg7 SMUADX reg9, reg11, reg7 LDR reg8, [pSrc], strideMult4 LDR reg11, [pCoeff, coeffStrideMult4] LSL reg9, reg9, #1 PKHTB reg7, reg9, reg10, ASR #15 SMUSD reg10, reg11, reg8 SMUADX reg8, reg11, reg8 LSL reg8, reg8, #1 PKHTB reg9, reg8, reg10, ASR #15 SHADD16 reg10, reg5, reg7 SHSUB16 reg11, reg5, reg7 SHADD16 reg8, reg6, reg9 SHSUB16 reg9, reg6, reg9 SHADD16 reg5, reg10, reg8 SHSUB16 reg7, reg10, reg8 SHASX reg6, reg11, reg9 SHSAX reg8, reg11, reg9 STR reg5, [pDst], strideMult4 STR reg6, [pDst], strideMult4 STR reg7, [pDst], strideMult4 STR reg8, [pDst], strideMult4 } #else asm volatile( "LDR r11, [%[pCoeff], %[coeffStrideDw]]! \n\t" "LDR r5, [%[pSrc]], %[strideDw] \n\t" "LDR r7, [%[pSrc]], %[strideDw] \n\t" "LDR r6, [%[pSrc]], %[strideDw] \n\t" "SMUSD r10, r11, r6 \n\t" "SMUADX r9, r11, r6 \n\t" "LDR r11, [%[pCoeff], %[coeffStrideDw]]! \n\t" "LSL r9, r9, #1 \n\t" "PKHTB r6, r9, r10, ASR #15 \n\t" "SMUSD r10, r11, r7 \n\t" "SMUADX r9, r11, r7 \n\t" "LDR r8, [%[pSrc]], %[strideDw] \n\t" "LDR r11, [%[pCoeff], %[coeffStrideDw]] \n\t" "LSL r9, r9, #1 \n\t" "PKHTB r7, r9, r10, ASR #15 \n\t" "SMUSD r10, r11, r8 \n\t" "SMUADX r8, r11, r8 \n\t" "LSL r8, r8, #1 \n\t" "PKHTB r9, r8, r10, ASR #15 \n\t" "SHADD16 r10, r5, r7 \n\t" "SHSUB16 r11, r5, r7 \n\t" "SHADD16 r8, r6, r9 \n\t" "SHSUB16 r9, r6, r9 \n\t" "SHADD16 r5, r10, r8 \n\t" "SHSUB16 r7, r10, r8 \n\t" "SHASX r6, r11, r9 \n\t" "SHSAX r8, r11, r9 \n\t" "STR r5, [%[pDst]], %[strideDw] \n\t" "STR r6, [%[pDst]], %[strideDw] \n\t" "STR r7, [%[pDst]], %[strideDw] \n\t" "STR r8, [%[pDst]], %[strideDw] \n\t" : [pSrc]"+r"(pSrc), [pDst]"+r"(pDst) : [pCoeff]"r"(pCoeff), [strideDw]"r"(4*stride), [coeffStrideDw]"r"(4*coeffStride) : "r5", "r6", "r7", "r8", "r9", "r10","r11"); /* "SMULBT r8, r6, r11 \n\t" "SMULTB r9, r6, r11 \n\t" "QSUB r9, r9, r8 \n\t" */ #endif #else INT16 x1I, x1Q, x2I, x2Q, x3I, x3Q; // x0I, x0Q, INT16 z0I, z0Q, z1I, z1Q, z2I, z2Q, z3I, z3Q; 
INT16 y0I, y0Q, y1I, y1Q, y2I, y2Q, y3I, y3Q; INT16 AI, AQ, BI, BQ, CI, CQ, DI, DQ; INT16 W1I, W1Q, W2I, W2Q, W3I, W3Q; // W0I, W0Q, unsigned int *loadPtr = (unsigned int *)pSrc; unsigned int loadTemp; unsigned int *coeffPtr = (unsigned int *)(pCoeff +2*coeffStride); unsigned int loadCoeff; unsigned int *storePtr = (unsigned int *)pDst; unsigned int storeTemp; // re-order due to L^4_2 - because of using radix-2 bit-reversal loadTemp = *loadPtr; loadPtr +=stride; z0I = (loadTemp&0xffff); z0Q = (loadTemp>>16); loadTemp = *loadPtr; loadPtr +=stride; x2I = (loadTemp&0xffff); x2Q = (loadTemp>>16); loadTemp = *loadPtr; loadPtr +=stride; x1I = (loadTemp&0xffff); x1Q = (loadTemp>>16); loadTemp = *loadPtr; loadPtr +=stride; x3I = (loadTemp&0xffff); x3Q = (loadTemp>>16); loadCoeff = *coeffPtr; coeffPtr +=coeffStride; W1I = (loadCoeff&0xffff); W1Q = (loadCoeff>>16); loadCoeff = *coeffPtr; coeffPtr +=coeffStride; W2I = (loadCoeff&0xffff); W2Q = (loadCoeff>>16); loadCoeff = *coeffPtr; coeffPtr +=coeffStride; W3I = (loadCoeff&0xffff); W3Q = (loadCoeff>>16); #ifdef ARM_DEBUG z1I = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulatorNeg( radix4Fft16BitTo32BitMultiplier(W1I, x1I), radix4Fft16BitTo32BitMultiplier(W1Q, x1Q))); // r6 z1Q = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulator( radix4Fft16BitTo32BitMultiplier(W1I, x1Q), radix4Fft16BitTo32BitMultiplier(W1Q, x1I))); z2I = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulatorNeg( radix4Fft16BitTo32BitMultiplier(W2I, x2I), radix4Fft16BitTo32BitMultiplier(W2Q, x2Q))); // r7 z2Q = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulator( radix4Fft16BitTo32BitMultiplier(W2I, x2Q), radix4Fft16BitTo32BitMultiplier(W2Q, x2I))); z3I = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulatorNeg( radix4Fft16BitTo32BitMultiplier(W3I, x3I), radix4Fft16BitTo32BitMultiplier(W3Q, x3Q))); // r9 z3Q = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulator( radix4Fft16BitTo32BitMultiplier(W3I, x3Q), 
radix4Fft16BitTo32BitMultiplier(W3Q, x3I))); #else z1I = (INT16)( (((INT32)W1I*x1I)-((INT32)W1Q*x1Q))>>15 ); z1Q = (INT16)( (((INT32)W1I*x1Q)+((INT32)W1Q*x1I))>>15 ); z2I = (INT16)( (((INT32)W2I*x2I)-((INT32)W2Q*x2Q))>>15 ); z2Q = (INT16)( (((INT32)W2I*x2Q)+((INT32)W2Q*x2I))>>15 ); z3I = (INT16)( (((INT32)W3I*x3I)-((INT32)W3Q*x3Q))>>15 ); z3Q = (INT16)( (((INT32)W3I*x3Q)+((INT32)W3Q*x3I))>>15 ); #endif // calculate using A = (z0+z2), B = (z0-z2), C = (z1+z3), D = (z1-z3); // y0 = z0 + z1 + z2 + z3 = A + C // y1 = z0 -jz1 - z2 +jz3 = B -jD // y2 = z0 - z1 + z2 - z3 = A - C // y3 = z0 +jz1 - z2 -jz3 = B +jD #ifdef ARM_DEBUG AI = radix4Fft16BitAddShift(z0I, z2I); AQ = radix4Fft16BitAddShift(z0Q, z2Q); BI = radix4Fft16BitSubtractShift(z0I, z2I); BQ = radix4Fft16BitSubtractShift(z0Q, z2Q); CI = radix4Fft16BitAddShift(z1I, z3I); CQ = radix4Fft16BitAddShift(z1Q, z3Q); DI = radix4Fft16BitSubtractShift(z1I, z3I); DQ = radix4Fft16BitSubtractShift(z1Q, z3Q); y0I = radix4Fft16BitAddShift(AI, CI); y0Q = radix4Fft16BitAddShift(AQ, CQ); y1I = radix4Fft16BitSubtractShift(BI, DQ); y1Q = radix4Fft16BitAddShift(BQ, DI); y2I = radix4Fft16BitSubtractShift(AI, CI); y2Q = radix4Fft16BitSubtractShift(AQ, CQ); y3I = radix4Fft16BitAddShift(BI, DQ); y3Q = radix4Fft16BitSubtractShift(BQ, DI); #else AI = (z0I+z2I)>>1; AQ = (z0Q+z2Q)>>1; BI = (z0I-z2I)>>1; BQ = (z0Q-z2Q)>>1; CI = (z1I+z3I)>>1; CQ = (z1Q+z3Q)>>1; DI = (z1I-z3I)>>1; DQ = (z1Q-z3Q)>>1; y0I = (AI+CI)>>1; y0Q = (AQ+CQ)>>1; y1I = (BI-DQ)>>1; y1Q = (BQ+DI)>>1; y2I = (AI-CI)>>1; y2Q = (AQ-CQ)>>1; y3I = (BI+DQ)>>1; y3Q = (BQ-DI)>>1; #endif storeTemp = (unsigned short)y0Q; storeTemp<<=16; storeTemp |=(unsigned short)y0I; *storePtr = storeTemp; storePtr+=stride; storeTemp = (unsigned short)y1Q; storeTemp<<=16; storeTemp |=(unsigned short)y1I; *storePtr = storeTemp; storePtr+=stride; storeTemp = (unsigned short)y2Q; storeTemp<<=16; storeTemp |=(unsigned short)y2I; *storePtr = storeTemp; storePtr+=stride; storeTemp = (unsigned short)y3Q; 
storeTemp<<=16; storeTemp |=(unsigned short)y3I; *storePtr = storeTemp; storePtr+=stride; #endif } #ifdef FFT_PARALLEL void radix4IfftRadix4ButterflyTwiddleParallel(INT16* pSrc, INT16* pDst, int stride, const INT16* pCoeff, int coeffStride) { int qq; INT16 x1I[NUM_PARALLEL], x1Q[NUM_PARALLEL], x2I[NUM_PARALLEL], x2Q[NUM_PARALLEL], x3I[NUM_PARALLEL], x3Q[NUM_PARALLEL]; // x0I, x0Q, INT16 z0I[NUM_PARALLEL], z0Q[NUM_PARALLEL], z1I[NUM_PARALLEL], z1Q[NUM_PARALLEL], z2I[NUM_PARALLEL], z2Q[NUM_PARALLEL], z3I[NUM_PARALLEL], z3Q[NUM_PARALLEL]; INT16 y0I[NUM_PARALLEL], y0Q[NUM_PARALLEL], y1I[NUM_PARALLEL], y1Q[NUM_PARALLEL], y2I[NUM_PARALLEL], y2Q[NUM_PARALLEL], y3I[NUM_PARALLEL], y3Q[NUM_PARALLEL]; INT16 AI[NUM_PARALLEL], AQ[NUM_PARALLEL], BI[NUM_PARALLEL], BQ[NUM_PARALLEL], CI[NUM_PARALLEL], CQ[NUM_PARALLEL], DI[NUM_PARALLEL], DQ[NUM_PARALLEL]; INT16 W1I, W1Q, W2I, W2Q, W3I, W3Q; // W0I, W0Q, unsigned int *loadPtr = (unsigned int *)pSrc; unsigned int loadTemp; unsigned int *coeffPtr = (unsigned int *)(pCoeff + 2 * coeffStride); unsigned int loadCoeff; unsigned int *storePtr = (unsigned int *)pDst; unsigned int storeTemp; // re-order due to L^4_2 - because of using radix-2 bit-reversal for (qq = 0;qq> 16); } loadPtr += stride; for (qq = 0;qq> 16); } loadPtr += stride; for (qq = 0;qq> 16); } loadPtr += stride; for (qq = 0;qq> 16); } loadCoeff = *coeffPtr; coeffPtr += coeffStride; W1I = (loadCoeff & 0xffff); W1Q = (loadCoeff >> 16); loadCoeff = *coeffPtr; coeffPtr += coeffStride; W2I = (loadCoeff & 0xffff); W2Q = (loadCoeff >> 16); loadCoeff = *coeffPtr; coeffPtr += coeffStride; W3I = (loadCoeff & 0xffff); W3Q = (loadCoeff >> 16); for (qq = 0;qq> 15); z1Q[qq] = (INT16)((((INT32)W1I*x1Q[qq]) + ((INT32)W1Q*x1I[qq])) >> 15); z2I[qq] = (INT16)((((INT32)W2I*x2I[qq]) - ((INT32)W2Q*x2Q[qq])) >> 15); z2Q[qq] = (INT16)((((INT32)W2I*x2Q[qq]) + ((INT32)W2Q*x2I[qq])) >> 15); z3I[qq] = (INT16)((((INT32)W3I*x3I[qq]) - ((INT32)W3Q*x3Q[qq])) >> 15); z3Q[qq] = (INT16)((((INT32)W3I*x3Q[qq]) + 
((INT32)W3Q*x3I[qq])) >> 15); } // calculate using A = (z0+z2), B = (z0-z2), C = (z1+z3), D = (z1-z3); // y0 = z0 + z1 + z2 + z3 = A + C // y1 = z0 -jz1 - z2 +jz3 = B -jD // y2 = z0 - z1 + z2 - z3 = A - C // y3 = z0 +jz1 - z2 -jz3 = B +jD for (qq = 0;qq> 1; AQ[qq] = (z0Q[qq] + z2Q[qq]) >> 1; BI[qq] = (z0I[qq] - z2I[qq]) >> 1; BQ[qq] = (z0Q[qq] - z2Q[qq]) >> 1; CI[qq] = (z1I[qq] + z3I[qq]) >> 1; CQ[qq] = (z1Q[qq] + z3Q[qq]) >> 1; DI[qq] = (z1I[qq] - z3I[qq]) >> 1; DQ[qq] = (z1Q[qq] - z3Q[qq]) >> 1; } for (qq = 0;qq> 1; y0Q[qq] = (AQ[qq] + CQ[qq]) >> 1; y1I[qq] = (BI[qq] - DQ[qq]) >> 1; y1Q[qq] = (BQ[qq] + DI[qq]) >> 1; y2I[qq] = (AI[qq] - CI[qq]) >> 1; y2Q[qq] = (AQ[qq] - CQ[qq]) >> 1; y3I[qq] = (BI[qq] + DQ[qq]) >> 1; y3Q[qq] = (BQ[qq] - DI[qq]) >> 1; } for (qq = 0;qq>16); loadTemp = *loadPtr; loadPtr +=stride; x1I = (loadTemp&0xffff); x1Q = (loadTemp>>16); loadCoeff = *coeffPtr; coeffPtr +=coeffStride; W1I = (loadCoeff&0xffff); W1Q = (loadCoeff>>16); #ifdef ARM_DEBUG z1I = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulator( radix4Fft16BitTo32BitMultiplier(W1I, x1I), radix4Fft16BitTo32BitMultiplier(W1Q, x1Q))); // r6 z1Q = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulatorNeg( radix4Fft16BitTo32BitMultiplier(W1I, x1Q), radix4Fft16BitTo32BitMultiplier(W1Q, x1I))); y0I = radix4Fft16BitAdd(z0I, z1I); y0Q = radix4Fft16BitAdd(z0Q, z1Q); y1I = radix4Fft16BitSubtract(z0I, z1I); y1Q = radix4Fft16BitSubtract(z0Q, z1Q); #else z1I = (INT16)( (((INT32)W1I*x1I)+((INT32)W1Q*x1Q))>>15 ); z1Q = (INT16)( (((INT32)W1I*x1Q)-((INT32)W1Q*x1I))>>15 ); y0I = (z0I+z1I); y0Q = (z0Q+z1Q); y1I = (z0I-z1I); y1Q = (z0Q-z1Q); #endif storeTemp = (unsigned short)y0Q; storeTemp<<=16; storeTemp |=(unsigned short)y0I; *storePtr = storeTemp; storePtr+=stride; storeTemp = (unsigned short)y1Q; storeTemp<<=16; storeTemp |=(unsigned short)y1I; *storePtr = storeTemp; storePtr+=stride; #endif } void radix4IfftRadix2ButterflyTwiddle(INT16* pSrc, INT16* pDst, int stride, const INT16* pCoeff, int 
coeffStride){ #if defined(ARM_GCC) || defined(ARM_DS5) #ifdef ARM_DS5 int reg5, reg6, reg7, reg8, reg9; __asm { LDR reg7, [pCoeff, (4*coeffStride)] LDR reg5, [pSrc] LDR reg6, [pSrc, (4*stride)] SMUSD reg8, reg7, reg6 SMUADX reg9, reg7, reg6 LSL reg7, reg9, #1 PKHTB reg6, reg7, reg8, ASR #15 SHADD16 reg7, reg5, reg6 SHSUB16 reg8, reg5, reg6 STR reg7, [pDst] STR reg8, [pDst, (4*stride)] } #else asm( "LDR r7, [%[pCoeff], %[coeffStrideDw]] \n\t" "LDR r5, [%[pSrc]] \n\t" "LDR r6, [%[pSrc], %[strideDw]] \n\t" "SMUSD r8, r7, r6 \n\t" "SMUADX r9, r7, r6 \n\t" "LSL r7, r9, #1 \n\t" "PKHTB r6, r7, r8, ASR #15 \n\t" "SHADD16 r7, r5, r6 \n\t" "SHSUB16 r8, r5, r6 \n\t" "STR r7, [%[pDst]] \n\t" "STR r8, [%[pDst], %[strideDw]] \n\t" :: [pSrc]"r"(pSrc), [pDst]"r"(pDst), [pCoeff]"r"(pCoeff), [strideDw]"r"(4*stride), [coeffStrideDw]"r"(4*coeffStride) : "r5", "r6", "r7", "r8", "r9"); #endif #else INT16 x1I, x1Q; // x0I, x0Q, INT16 z0I, z0Q, z1I, z1Q; INT16 y0I, y0Q, y1I, y1Q; INT16 W1I, W1Q; // W0I, W0Q, unsigned int *loadPtr = (unsigned int *)pSrc; unsigned int loadTemp; unsigned int *coeffPtr = (unsigned int *)(pCoeff +2*coeffStride); unsigned int loadCoeff; unsigned int *storePtr = (unsigned int *)pDst; unsigned int storeTemp; // re-order due to L^4_2 - because of using radix-2 bit-reversal loadTemp = *loadPtr; loadPtr +=stride; z0I = (loadTemp&0xffff); z0Q = (loadTemp>>16); loadTemp = *loadPtr; loadPtr +=stride; x1I = (loadTemp&0xffff); x1Q = (loadTemp>>16); loadCoeff = *coeffPtr; coeffPtr +=coeffStride; W1I = (loadCoeff&0xffff); W1Q = (loadCoeff>>16); #ifdef ARM_DEBUG z1I = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulatorNeg( radix4Fft16BitTo32BitMultiplier(W1I, x1I), radix4Fft16BitTo32BitMultiplier(W1Q, x1Q))); // r6 z1Q = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulator( radix4Fft16BitTo32BitMultiplier(W1I, x1Q), radix4Fft16BitTo32BitMultiplier(W1Q, x1I))); y0I = radix4Fft16BitAddShift(z0I, z1I); y0Q = radix4Fft16BitAddShift(z0Q, z1Q); y1I = 
radix4Fft16BitSubtractShift(z0I, z1I); y1Q = radix4Fft16BitSubtractShift(z0Q, z1Q); #else z1I = ( (((INT32)W1I*x1I)-((INT32)W1Q*x1Q))>>15 ); z1Q = ( (((INT32)W1I*x1Q)+((INT32)W1Q*x1I))>>15 ); y0I = (z0I+z1I)>>1; y0Q = (z0Q+z1Q)>>1; y1I = (z0I-z1I)>>1; y1Q = (z0Q-z1Q)>>1; #endif storeTemp = (unsigned short)y0Q; storeTemp<<=16; storeTemp |=(unsigned short)y0I; *storePtr = storeTemp; storePtr+=stride; storeTemp = (unsigned short)y1Q; storeTemp<<=16; storeTemp |=(unsigned short)y1I; *storePtr = storeTemp; storePtr+=stride; #endif } #ifdef FFT_PARALLEL void radix4IfftRadix2ButterflyTwiddleParallel(INT16* pSrc, INT16* pDst, int stride, const INT16* pCoeff, int coeffStride) { int qq; INT16 x1I[NUM_PARALLEL], x1Q[NUM_PARALLEL]; // x0I, x0Q, INT16 z0I[NUM_PARALLEL], z0Q[NUM_PARALLEL], z1I[NUM_PARALLEL], z1Q[NUM_PARALLEL]; INT16 y0I[NUM_PARALLEL], y0Q[NUM_PARALLEL], y1I[NUM_PARALLEL], y1Q[NUM_PARALLEL]; INT16 W1I, W1Q; // W0I, W0Q, unsigned int *loadPtr = (unsigned int *)pSrc; unsigned int loadTemp; unsigned int *coeffPtr = (unsigned int *)(pCoeff + 2 * coeffStride); unsigned int loadCoeff; unsigned int *storePtr = (unsigned int *)pDst; unsigned int storeTemp; // re-order due to L^4_2 - because of using radix-2 bit-reversal for (qq = 0;qq> 16); } loadPtr += stride; for (qq = 0;qq> 16); } loadPtr += stride; loadCoeff = *coeffPtr; coeffPtr += coeffStride; W1I = (loadCoeff & 0xffff); W1Q = (loadCoeff >> 16); for (qq = 0;qq> 15); z1Q[qq] = ((((INT32)W1I*x1Q[qq]) + ((INT32)W1Q*x1I[qq])) >> 15); } for (qq = 0;qq> 1; y0Q[qq] = (z0Q[qq] + z1Q[qq]) >> 1; y1I[qq] = (z0I[qq] - z1I[qq]) >> 1; y1Q[qq] = (z0Q[qq] - z1Q[qq]) >> 1; } for (qq = 0;qq>2; for(nn=loopLen;nn>0;nn--){ // first stage does not need saturation #ifdef ARM_DS5 __asm volatile { RBIT reg4, index1 LDR reg5, [pSrc, reg4] ADD index1, index1, revBitInc RBIT reg4, index1 LDR reg6, [pSrc, reg4] ADD index1, index1, revBitInc RBIT reg4, index1 LDR reg7, [pSrc, reg4] ADD index1, index1, revBitInc RBIT reg4, index1 LDR reg8, 
[pSrc, reg4] ADD index1, index1, revBitInc SADD16 reg9, reg5, reg6 SSUB16 reg10, reg5, reg6 SADD16 reg4, reg7, reg8 SSUB16 reg5, reg7, reg8 SADD16 reg6, reg9, reg4 SSUB16 reg8, reg9, reg4 SSAX reg7, reg10, reg5 SASX reg9, reg10, reg5 STR reg6, [pDst], #4 STR reg7, [pDst], #4 STR reg8, [pDst], #4 STR reg9, [pDst], #4 } /// STM pDst!, {reg6-reg9} #else asm volatile ( "RBIT r4, %[index1] \n\t" "LDR r5, [%[pSrc], r4] \n\t" "ADD %[index1], %[index1], %[revBitInc] \n\t" "RBIT r4, %[index1] \n\t" "LDR r6, [%[pSrc], r4]\n\t" "ADD %[index1], %[index1], %[revBitInc] \n\t" "RBIT r4, %[index1] \n\t" "LDR r7, [%[pSrc], r4] \n\t" "ADD %[index1], %[index1], %[revBitInc] \n\t" "RBIT r4, %[index1] \n\t" "LDR r8, [%[pSrc], r4] \n\t" "ADD %[index1], %[index1], %[revBitInc] \n\t" "SADD16 r9, r5, r6 \n\t" "SSUB16 r10, r5, r6 \n\t" "SADD16 r4, r7, r8 \n\t" "SSUB16 r5, r7, r8 \n\t" "SADD16 r6, r9, r4 \n\t" "SSUB16 r8, r9, r4 \n\t" "SSAX r7, r10, r5 \n\t" "SASX r9, r10, r5 \n\t" "STM %[pDst]!, {r6-r9} \n\t" : [pDst]"+r"(pDst), [index1]"+r"(index1) : [pSrc]"r"(pSrc), [revBitInc]"r"(revBitInc) : "r4", "r5", "r6", "r7", "r8", "r9","r10","memory"); #endif } } /* "STRD r6, r7, [%[pDst]], #8 \n\t" "STRD r8, r9, [%[pDst]], #8 \n\t" */ void radix4IfftStageOne(INT16* pSrc, INT16* pDst, int Nfft){ #ifdef ARM_DS5 int reg4, reg5, reg6, reg7, reg8, reg9, reg10; #endif int nn; int loopLen; unsigned int index1 = 0; // bit-reversal permutation int revBitInc = 1<<(1+__clz(Nfft)-2); // -2 for 4 byte LD loopLen = Nfft>>2; for(nn=loopLen;nn>0;nn--){ // first stage does not need saturation #ifdef ARM_DS5 __asm volatile { RBIT reg4, index1 LDR reg5, [pSrc, reg4] ADD index1, index1, revBitInc RBIT reg4, index1 LDR reg6, [pSrc, reg4] ADD index1, index1, revBitInc RBIT reg4, index1 LDR reg7, [pSrc, reg4] ADD index1, index1, revBitInc RBIT reg4, index1 LDR reg8, [pSrc, reg4] ADD index1, index1, revBitInc SHADD16 reg9, reg5, reg6 SHSUB16 reg10, reg5, reg6 SHADD16 reg4, reg7, reg8 SHSUB16 reg5, reg7, reg8 SHADD16 
reg6, reg9, reg4 SHSUB16 reg8, reg9, reg4 SHASX reg7, reg10, reg5 SHSAX reg9, reg10, reg5 STR reg6, [pDst], #4 STR reg7, [pDst], #4 STR reg8, [pDst], #4 STR reg9, [pDst], #4 } // STM pDst!, {reg6, reg7, reg8, reg9} #else asm volatile ( "RBIT r4, %[index1] \n\t" "LDR r5, [%[pSrc], r4] \n\t" "ADD %[index1], %[index1], %[revBitInc] \n\t" "RBIT r4, %[index1] \n\t" "LDR r6, [%[pSrc], r4]\n\t" "ADD %[index1], %[index1], %[revBitInc] \n\t" "RBIT r4, %[index1] \n\t" "LDR r7, [%[pSrc], r4] \n\t" "ADD %[index1], %[index1], %[revBitInc] \n\t" "RBIT r4, %[index1] \n\t" "LDR r8, [%[pSrc], r4] \n\t" "ADD %[index1], %[index1], %[revBitInc] \n\t" "SHADD16 r9, r5, r6 \n\t" "SHSUB16 r10, r5, r6 \n\t" "SHADD16 r4, r7, r8 \n\t" "SHSUB16 r5, r7, r8 \n\t" "SHADD16 r6, r9, r4 \n\t" "SHSUB16 r8, r9, r4 \n\t" "SHASX r7, r10, r5 \n\t" "SHSAX r9, r10, r5 \n\t" "STM %[pDst]!, {r6-r9} \n\t" : [pDst]"+r"(pDst), [index1]"+r"(index1) : [pSrc]"r"(pSrc), [revBitInc]"r"(revBitInc) : "r4", "r5", "r6", "r7", "r8", "r9","r10","memory"); #endif } } #else void radix4FftRadix4ButterflyBrev(INT16* pSrc, INT16* pDst, unsigned int index1, int log2Nfft){ INT16 x0I, x0Q, x1I, x1Q, x2I, x2Q, x3I, x3Q; INT16 y0I, y0Q, y1I, y1Q, y2I, y2Q, y3I, y3Q; INT16 AI, AQ, BI, BQ, CI, CQ, DI, DQ; unsigned int index2; unsigned int *loadPtr = (unsigned int*)pSrc; unsigned int *storePtr = (unsigned int*)pDst; unsigned int loadTemp, storeTemp; int bitShift = IDX_LEN-log2Nfft; // re-order due to L^4_2 - because of using radix-2 bit-reversal index2 = REV_IDX(index1++); index2>>=bitShift; loadTemp = loadPtr[index2]; x0I = (loadTemp&0xffff); x0Q = (loadTemp>>16); //x0I = pSrc[2*index2]; x0Q = pSrc[2*index2+1]; // r5 index2 = REV_IDX(index1++); index2>>=bitShift; loadTemp = loadPtr[index2]; x2I = (loadTemp&0xffff); x2Q = (loadTemp>>16); //x2I = pSrc[2*index2]; x2Q = pSrc[2*index2+1]; // r6 index2 = REV_IDX(index1++); index2>>=bitShift; loadTemp = loadPtr[index2]; x1I = (loadTemp&0xffff); x1Q = (loadTemp>>16); //x1I = pSrc[2*index2]; 
x1Q = pSrc[2*index2+1]; // r7 index2 = REV_IDX(index1++); index2>>=bitShift; loadTemp = loadPtr[index2]; x3I = (loadTemp&0xffff); x3Q = (loadTemp>>16); //x3I = pSrc[2*index2]; x3Q = pSrc[2*index2+1]; // r8 #ifdef ARM_DEBUG AI = radix4Fft16BitAdd(x0I, x2I); AQ = radix4Fft16BitAdd(x0Q, x2Q); // r4 BI = radix4Fft16BitSubtract(x0I, x2I); BQ = radix4Fft16BitSubtract(x0Q, x2Q); // r5 CI = radix4Fft16BitAdd(x1I, x3I); CQ = radix4Fft16BitAdd(x1Q, x3Q); // r6 DI = radix4Fft16BitSubtract(x1I, x3I); DQ = radix4Fft16BitSubtract(x1Q, x3Q); // r7 y0I = radix4Fft16BitAdd(AI, CI); y0Q = radix4Fft16BitAdd(AQ, CQ); // r8 y2I = radix4Fft16BitSubtract(AI, CI); y2Q = radix4Fft16BitSubtract(AQ, CQ); // r4 y3I = radix4Fft16BitSubtract(BI, DQ); y3Q = radix4Fft16BitAdd(BQ, DI); // r6 y1I = radix4Fft16BitAdd(BI, DQ); y1Q = radix4Fft16BitSubtract(BQ, DI); // r5 #else AI = x0I+x2I; AQ = x0Q+x2Q; BI = x0I-x2I; BQ = x0Q-x2Q; CI = x1I+x3I; CQ = x1Q+x3Q; DI = x1I-x3I; DQ = x1Q-x3Q; y0I = AI+CI; y0Q = AQ+CQ; y2I = AI-CI; y2Q = AQ-CQ; y3I = BI-DQ; y3Q = BQ+DI; y1I = BI+DQ; y1Q = BQ-DI; #endif storeTemp = (unsigned short)y0Q; storeTemp<<=16; storeTemp |=(unsigned short)y0I; *storePtr++ = storeTemp; storeTemp = (unsigned short)y1Q; storeTemp<<=16; storeTemp |=(unsigned short)y1I; *storePtr++ = storeTemp; storeTemp = (unsigned short)y2Q; storeTemp<<=16; storeTemp |=(unsigned short)y2I; *storePtr++ = storeTemp; storeTemp = (unsigned short)y3Q; storeTemp<<=16; storeTemp |=(unsigned short)y3I; *storePtr++ = storeTemp; } void radix4IfftRadix4ButterflyBrev(INT16* pSrc, INT16* pDst, unsigned int index1, int log2Nfft){ INT16 x0I, x0Q, x1I, x1Q, x2I, x2Q, x3I, x3Q; INT16 y0I, y0Q, y1I, y1Q, y2I, y2Q, y3I, y3Q; INT16 AI, AQ, BI, BQ, CI, CQ, DI, DQ; unsigned int index2; unsigned int *loadPtr = (unsigned int*)pSrc; unsigned int *storePtr = (unsigned int*)pDst; unsigned int loadTemp, storeTemp; int bitShift = IDX_LEN-log2Nfft; // re-order due to L^4_2 - because of using radix-2 bit-reversal index2 = 
REV_IDX(index1++); index2>>=bitShift; loadTemp = loadPtr[index2]; x0I = (loadTemp&0xffff); x0Q = (loadTemp>>16); //x0I = pSrc[2*index2]; x0Q = pSrc[2*index2+1]; // r5 index2 = REV_IDX(index1++); index2>>=bitShift; loadTemp = loadPtr[index2]; x2I = (loadTemp&0xffff); x2Q = (loadTemp>>16); //x2I = pSrc[2*index2]; x2Q = pSrc[2*index2+1]; // r6 index2 = REV_IDX(index1++); index2>>=bitShift; loadTemp = loadPtr[index2]; x1I = (loadTemp&0xffff); x1Q = (loadTemp>>16); //x1I = pSrc[2*index2]; x1Q = pSrc[2*index2+1]; // r7 index2 = REV_IDX(index1++); index2>>=bitShift; loadTemp = loadPtr[index2]; x3I = (loadTemp&0xffff); x3Q = (loadTemp>>16); //x3I = pSrc[2*index2]; x3Q = pSrc[2*index2+1]; // r8 #ifdef ARM_DEBUG AI = radix4Fft16BitAddShift(x0I, x2I); AQ = radix4Fft16BitAddShift(x0Q, x2Q); // r4 BI = radix4Fft16BitSubtractShift(x0I, x2I); BQ = radix4Fft16BitSubtractShift(x0Q, x2Q); // r5 CI = radix4Fft16BitAddShift(x1I, x3I); CQ = radix4Fft16BitAddShift(x1Q, x3Q); // r6 DI = radix4Fft16BitSubtractShift(x1I, x3I); DQ = radix4Fft16BitSubtractShift(x1Q, x3Q); // r7 y0I = radix4Fft16BitAddShift(AI, CI); y0Q = radix4Fft16BitAddShift(AQ, CQ); // r8 y2I = radix4Fft16BitSubtractShift(AI, CI); y2Q = radix4Fft16BitSubtractShift(AQ, CQ); // r4 y3I = radix4Fft16BitAddShift(BI, DQ); y3Q = radix4Fft16BitSubtractShift(BQ, DI); // r6 y1I = radix4Fft16BitSubtractShift(BI, DQ); y1Q = radix4Fft16BitAddShift(BQ, DI); // r5 #else AI = (x0I+x2I)>>1; AQ = (x0Q+x2Q)>>1; BI = (x0I-x2I)>>1; BQ = (x0Q-x2Q)>>1; CI = (x1I+x3I)>>1; CQ = (x1Q+x3Q)>>1; DI = (x1I-x3I)>>1; DQ = (x1Q-x3Q)>>1; y0I = (AI+CI)>>1; y0Q = (AQ+CQ)>>1; y2I = (AI-CI)>>1; y2Q = (AQ-CQ)>>1; y3I = (BI+DQ)>>1; y3Q = (BQ-DI)>>1; y1I = (BI-DQ)>>1; y1Q = (BQ+DI)>>1; #endif storeTemp = (unsigned short)y0Q; storeTemp<<=16; storeTemp |=(unsigned short)y0I; *storePtr++ = storeTemp; storeTemp = (unsigned short)y1Q; storeTemp<<=16; storeTemp |=(unsigned short)y1I; *storePtr++ = storeTemp; storeTemp = (unsigned short)y2Q; storeTemp<<=16; storeTemp 
|=(unsigned short)y2I; *storePtr++ = storeTemp; storeTemp = (unsigned short)y3Q; storeTemp<<=16; storeTemp |=(unsigned short)y3I; *storePtr++ = storeTemp; } void radix4FftStageOne(INT16* pSrc, INT16* pDst, int Nfft){ int nn; int loopLen; INT16 *pSrcLoop, *pDstLoop; // bit-reversal permutation unsigned int index1; int log2Nfft = myMsb(Nfft); loopLen = Nfft>>2; pSrcLoop = pSrc; pDstLoop = pDst; index1 = 0; for(nn=0;nn>2; pSrcLoop = pSrc; pDstLoop = pDst; index1 = 0; for(nn=0;nn>= bitShift; loadTemp = loadPtr[NUM_PARALLEL * index2]; x0I = (loadTemp & 0xffff); x0Q = (loadTemp >> 16); index2 = REV_IDX(index1++); index2 >>= bitShift; loadTemp = loadPtr[NUM_PARALLEL * index2]; x2I = (loadTemp & 0xffff); x2Q = (loadTemp >> 16); index2 = REV_IDX(index1++); index2 >>= bitShift; loadTemp = loadPtr[NUM_PARALLEL * index2]; x1I = (loadTemp & 0xffff); x1Q = (loadTemp >> 16); index2 = REV_IDX(index1++); index2 >>= bitShift; loadTemp = loadPtr[NUM_PARALLEL * index2]; x3I = (loadTemp & 0xffff); x3Q = (loadTemp >> 16); AI = (x0I + x2I) >> 1; AQ = (x0Q + x2Q) >> 1; BI = (x0I - x2I) >> 1; BQ = (x0Q - x2Q) >> 1; CI = (x1I + x3I) >> 1; CQ = (x1Q + x3Q) >> 1; DI = (x1I - x3I) >> 1; DQ = (x1Q - x3Q) >> 1; y0I = (AI + CI) >> 1; y0Q = (AQ + CQ) >> 1; y2I = (AI - CI) >> 1; y2Q = (AQ - CQ) >> 1; y3I = (BI + DQ) >> 1; y3Q = (BQ - DI) >> 1; y1I = (BI - DQ) >> 1; y1Q = (BQ + DI) >> 1; storeTemp = (unsigned short)y0Q; storeTemp <<= 16; storeTemp |= (unsigned short)y0I; *storePtr = storeTemp; storePtr+=NUM_PARALLEL; storeTemp = (unsigned short)y1Q; storeTemp <<= 16; storeTemp |= (unsigned short)y1I; *storePtr = storeTemp; storePtr+= NUM_PARALLEL; storeTemp = (unsigned short)y2Q; storeTemp <<= 16; storeTemp |= (unsigned short)y2I; *storePtr = storeTemp; storePtr+= NUM_PARALLEL; storeTemp = (unsigned short)y3Q; storeTemp <<= 16; storeTemp |= (unsigned short)y3I; *storePtr = storeTemp; storePtr+= NUM_PARALLEL; } #endif #ifdef FFT_PARALLEL void radix4IfftRadix4ButterflyBrevParallel(INT16* pSrc, INT16* 
pDst, unsigned int index1, int log2Nfft) { int qq; INT16 x0I[NUM_PARALLEL], x0Q[NUM_PARALLEL], x1I[NUM_PARALLEL], x1Q[NUM_PARALLEL], x2I[NUM_PARALLEL], x2Q[NUM_PARALLEL], x3I[NUM_PARALLEL], x3Q[NUM_PARALLEL]; INT16 y0I[NUM_PARALLEL], y0Q[NUM_PARALLEL], y1I[NUM_PARALLEL], y1Q[NUM_PARALLEL], y2I[NUM_PARALLEL], y2Q[NUM_PARALLEL], y3I[NUM_PARALLEL], y3Q[NUM_PARALLEL]; INT16 AI[NUM_PARALLEL], AQ[NUM_PARALLEL], BI[NUM_PARALLEL], BQ[NUM_PARALLEL], CI[NUM_PARALLEL], CQ[NUM_PARALLEL], DI[NUM_PARALLEL], DQ[NUM_PARALLEL]; unsigned int index2; unsigned int *loadPtr = (unsigned int*)pSrc; unsigned int *storePtr = (unsigned int*)pDst; unsigned int loadTemp, storeTemp; int bitShift = 32 - log2Nfft; // re-order due to L^4_2 - because of using radix-2 bit-reversal index2 = REV_IDX(index1++); index2 >>= bitShift; for (qq = 0;qq> 16); } index2 = REV_IDX(index1++); index2 >>= bitShift; for (qq = 0;qq> 16); } index2 = REV_IDX(index1++); index2 >>= bitShift; for (qq = 0;qq> 16); } index2 = REV_IDX(index1++); index2 >>= bitShift; for (qq = 0;qq> 16); } for (qq = 0;qq> 1; AQ[qq] = (x0Q[qq] + x2Q[qq]) >> 1; BI[qq] = (x0I[qq] - x2I[qq]) >> 1; BQ[qq] = (x0Q[qq] - x2Q[qq]) >> 1; CI[qq] = (x1I[qq] + x3I[qq]) >> 1; CQ[qq] = (x1Q[qq] + x3Q[qq]) >> 1; DI[qq] = (x1I[qq] - x3I[qq]) >> 1; DQ[qq] = (x1Q[qq] - x3Q[qq]) >> 1; } for (qq = 0;qq> 1; y0Q[qq] = (AQ[qq] + CQ[qq]) >> 1; y2I[qq] = (AI[qq] - CI[qq]) >> 1; y2Q[qq] = (AQ[qq] - CQ[qq]) >> 1; y3I[qq] = (BI[qq] + DQ[qq]) >> 1; y3Q[qq] = (BQ[qq] - DI[qq]) >> 1; y1I[qq] = (BI[qq] - DQ[qq]) >> 1; y1Q[qq] = (BQ[qq] + DI[qq]) >> 1; } for (qq = 0;qq> 2; pSrcLoop = pSrc; pDstLoop = pDst; index1 = 0; for (nn = 0;nn> 2; pSrcLoop = pSrc; pDstLoop = pDst; index1 = 0; for (nn = 0;nn>1; int radix2 = log2Nfft&0x1; INT16 *pSrcLoop, *pDstLoop; int coeffStride, coeffStrideTemp; int ii, mm, nn; loop1 = Nfft>>2; loop2 = 1; // next stages coeffStrideTemp = lenCoeff>>2; for(ii=1;ii>=2; loop2 <<=2; coeffStrideTemp>>=2; pSrcLoop = pDst; pDstLoop = pDst; coeffStride = 0; 
for(mm=0;mm0;nn--){ radix4FftRadix4ButterflyTwiddle(pSrcLoop, pDstLoop, loop2, pCoeff, coeffStride); pSrcLoop += 2*4*loop2; pDstLoop += 2*4*loop2; } pSrcLoop += 2 -2*4*loop2*loop1; pDstLoop += 2 -2*4*loop2*loop1; coeffStride += coeffStrideTemp; // tabel size is fixed } } } void radix4IfftMainStages(INT16* pSrc, INT16* pDst, int Nfft, const INT16* pCoeff, int lenCoeff){ int loop1, loop2; #if defined(ARM_GCC) || defined(ARM_DS5) || defined(ARM_966) int log2Nfft = 31-__clz(Nfft); #else int log2Nfft = myMsb(Nfft); #endif int log4Nfft = log2Nfft>>1; int radix2 = log2Nfft&0x1; INT16 *pSrcLoop, *pDstLoop; int coeffStride, coeffStrideTemp; int ii, mm, nn; loop1 = Nfft>>2; loop2 = 1; // next stages coeffStrideTemp = lenCoeff>>2; for(ii=1;ii>=2; loop2 <<=2; coeffStrideTemp>>=2; pSrcLoop = pDst; pDstLoop = pDst; coeffStride = 0; for(mm=0;mm0;nn--){ radix4IfftRadix4ButterflyTwiddle(pSrcLoop, pDstLoop, NUM_PARALLEL*loop2, pCoeff, coeffStride); pSrcLoop += NUM_PARALLEL * 2*4*loop2; pDstLoop += NUM_PARALLEL * 2*4*loop2; } pSrcLoop += NUM_PARALLEL * (2 -2*4*loop2*loop1); pDstLoop += NUM_PARALLEL * (2 -2*4*loop2*loop1); coeffStride += coeffStrideTemp; // tabel size is fixed } } } #ifdef FFT_PARALLEL void radix4IfftMainStagesParallel(INT16* pSrc, INT16* pDst, int Nfft, const INT16* pCoeff, int lenCoeff) { int loop1, loop2; #if defined(ARM_GCC) || defined(ARM_DS5) || defined(ARM_966) int log2Nfft = 31 - __clz(Nfft); #else int log2Nfft = myMsb(Nfft); #endif int log4Nfft = log2Nfft >> 1; int radix2 = log2Nfft & 0x1; INT16 *pSrcLoop, *pDstLoop; int coeffStride, coeffStrideTemp; int ii, mm, nn; loop1 = Nfft >> 2; loop2 = 1; // next stages coeffStrideTemp = lenCoeff >> 2; for (ii = 1;ii>= 2; loop2 <<= 2; coeffStrideTemp >>= 2; pSrcLoop = pDst; pDstLoop = pDst; coeffStride = 0; for (mm = 0;mm0;nn--) { radix4IfftRadix4ButterflyTwiddleParallel(pSrcLoop, pDstLoop, NUM_PARALLEL*loop2, pCoeff, coeffStride); pSrcLoop += NUM_PARALLEL * 2 * 4 * loop2; pDstLoop += NUM_PARALLEL * 2 * 4 * loop2; } 
pSrcLoop += NUM_PARALLEL * (2 - 2 * 4 * loop2*loop1); pDstLoop += NUM_PARALLEL * (2 - 2 * 4 * loop2*loop1); coeffStride += coeffStrideTemp; // tabel size is fixed } } } #endif void myBitRev(INT16* pSrc, int Nfft){ int ii, jj; // , mm unsigned int tempVal; unsigned int *dataPtr = (unsigned int *)pSrc; #if defined(ARM_GCC) || defined(ARM_DS5) || defined(ARM_966) int log2Nfft = 31-__clz(Nfft); #else int log2Nfft = myMsb(Nfft); #endif int bitShift = IDX_LEN-log2Nfft; //jj = 0; for(ii=0;ii>bitShift; if(jj > ii){ tempVal = dataPtr[jj]; dataPtr[jj] = dataPtr[ii]; dataPtr[ii] = tempVal; } } } void radix2IfftMainStages(INT16* pBfr, int Nfft, const INT16* pCoeff, int lenCoeff){ int loop1, loop2; #if defined(ARM_GCC) || defined(ARM_DS5) || defined(ARM_966) int log2Nfft = 31-__clz(Nfft); #else int log2Nfft = myMsb(Nfft); #endif INT16 *pSrcLoop; int coeffStride, coeffStrideTemp; int ii, mm, nn; loop1 = Nfft>>1; loop2 = 1; // next stages coeffStrideTemp = lenCoeff>>1; for(ii=0;ii0;nn--){ radix4IfftRadix2ButterflyTwiddle(pSrcLoop, pSrcLoop, loop2, pCoeff, coeffStride); pSrcLoop += 2*2*loop2; } pSrcLoop += 2 -2*2*loop2*loop1; coeffStride += coeffStrideTemp; // tabel size is fixed } loop1 >>=1; loop2 <<=1; coeffStrideTemp>>=1; } } void radix4FftLastStage(INT16* pSrc, INT16* pDst, int Nfft, const INT16* pCoeff, int lenCoeff){ int mm, loopLen, coeffStride; #if defined(ARM_GCC) || defined(ARM_DS5) || defined(ARM_966) int log2Nfft = 31-__clz(Nfft); #else int log2Nfft = myMsb(Nfft); #endif int log4Nfft = log2Nfft>>1; int radix2 = log2Nfft&0x1; int coeffStrideTemp = lenCoeff>>(2*log4Nfft+radix2); INT16 *pSrcLoop = pDst; INT16 *pDstLoop = pDst; if(radix2){ loopLen = Nfft>>1; coeffStride = 0; for(mm=loopLen;mm>0;mm--){ radix4FftRadix2ButterflyTwiddle(pSrcLoop, pDstLoop, loopLen, pCoeff, coeffStride); pSrcLoop += 2; pDstLoop += 2; coeffStride += coeffStrideTemp; } } else{ // last iteration for even power of two loopLen = Nfft>>2; coeffStride = 0; for(mm=loopLen;mm>0;mm--){ 
radix4FftRadix4ButterflyTwiddle(pSrcLoop, pDstLoop, loopLen, pCoeff, coeffStride);
            pSrcLoop += 2;
            pDstLoop += 2;
            coeffStride += coeffStrideTemp;
        }
    }
}

/**
 * @brief Final butterfly stage of the fixed-point radix-4 IFFT.
 *
 * Operates fully in place on pDst (pSrc is unused here; it is kept only so the
 * signature matches the other stage functions). When log2(Nfft) is odd the
 * last stage is a radix-2 pass over Nfft/2 butterflies, otherwise a radix-4
 * pass over Nfft/4 butterflies. Data spacing passed to the butterfly kernels
 * is scaled by NUM_PARALLEL — assumes the interleaved multi-stream layout
 * used by the parallel/stride variants in this file (TODO confirm against
 * the butterfly kernel definitions).
 *
 * @param pSrc     unused (kept for signature symmetry with the FFT variant)
 * @param pDst     in/out buffer of interleaved 16-bit I/Q samples
 * @param Nfft     FFT length (power of two)
 * @param pCoeff   twiddle-factor table (interleaved I/Q, Q15)
 * @param lenCoeff number of complex entries in the twiddle table
 */
void radix4IfftLastStage(INT16* pSrc, INT16* pDst, int Nfft, const INT16* pCoeff, int lenCoeff){
    int mm, loopLen, coeffStride;
#if defined(ARM_GCC) || defined(ARM_DS5) || defined(ARM_966)
    int log2Nfft = 31-__clz(Nfft); // count-leading-zeros gives floor(log2(Nfft))
#else
    int log2Nfft = myMsb(Nfft);
#endif
    int log4Nfft = log2Nfft>>1;
    int radix2 = log2Nfft&0x1; // non-zero when Nfft is an odd power of two
    // twiddle decimation so the fixed-size table covers exactly this FFT length
    int coeffStrideTemp = lenCoeff>>(2*log4Nfft+radix2);
    INT16 *pSrcLoop = pDst; // in-place: both walk the destination buffer
    INT16 *pDstLoop = pDst;
    if(radix2){
        // odd power of two: finish with one radix-2 stage
        loopLen = Nfft>>1;
        coeffStride = 0;
        for(mm=loopLen;mm>0;mm--){
            radix4IfftRadix2ButterflyTwiddle(pSrcLoop, pDstLoop, NUM_PARALLEL*loopLen, pCoeff, coeffStride);
            pSrcLoop += 2 * NUM_PARALLEL; // advance one complex sample (I,Q) per stream
            pDstLoop += 2 * NUM_PARALLEL;
            coeffStride += coeffStrideTemp;
        }
    }
    else{ // last iteration for even power of two
        loopLen = Nfft>>2;
        coeffStride = 0;
        for(mm=loopLen;mm>0;mm--){
            radix4IfftRadix4ButterflyTwiddle(pSrcLoop, pDstLoop, NUM_PARALLEL*loopLen, pCoeff, coeffStride);
            pSrcLoop += 2 * NUM_PARALLEL;
            pDstLoop += 2 * NUM_PARALLEL;
            coeffStride += coeffStrideTemp;
        }
    }
}

#ifdef FFT_PARALLEL
/**
 * @brief Parallel-lane counterpart of radix4IfftLastStage; identical control
 *        flow but dispatches to the *Parallel butterfly kernels.
 */
void radix4IfftLastStageParallel(INT16* pSrc, INT16* pDst, int Nfft, const INT16* pCoeff, int lenCoeff)
{
    int mm, loopLen, coeffStride;
#if defined(ARM_GCC) || defined(ARM_DS5) || defined(ARM_966)
    int log2Nfft = 31 - __clz(Nfft);
#else
    int log2Nfft = myMsb(Nfft);
#endif
    int log4Nfft = log2Nfft >> 1;
    int radix2 = log2Nfft & 0x1;
    int coeffStrideTemp = lenCoeff >> (2 * log4Nfft + radix2);
    INT16 *pSrcLoop, *pDstLoop;
    pSrcLoop = pDst; // in-place on pDst, same as the scalar variant
    pDstLoop = pDst;
    if (radix2) {
        loopLen = Nfft >> 1;
        coeffStride = 0;
        for (mm = loopLen;mm>0;mm--) {
            radix4IfftRadix2ButterflyTwiddleParallel(pSrcLoop, pDstLoop, NUM_PARALLEL*loopLen, pCoeff, coeffStride);
            pSrcLoop += 2 * NUM_PARALLEL;
            pDstLoop += 2 * NUM_PARALLEL;
            coeffStride += coeffStrideTemp;
        }
    }
    else { // last iteration for even power of two
        loopLen = Nfft >> 2;
        coeffStride = 0;
        for (mm =
loopLen;mm>0;mm--) {
            radix4IfftRadix4ButterflyTwiddleParallel(pSrcLoop, pDstLoop, NUM_PARALLEL*loopLen, pCoeff, coeffStride);
            pSrcLoop += 2 * NUM_PARALLEL;
            pDstLoop += 2 * NUM_PARALLEL;
            coeffStride += coeffStrideTemp;
        }
    }
}
#endif

// only uses two buffers, out-of-place in the first stage (includes bit-reversal), then in-place
/**
 * @brief Top-level fixed-point radix-4 FFT driver.
 *
 * Stage one copies pSrc into pDst with bit-reversed addressing; the main and
 * last stages then run in place on pDst.
 *
 * @param pSrc     input buffer of interleaved 16-bit I/Q samples (unmodified)
 * @param pDst     output/working buffer, receives the transform
 * @param Nfft     FFT length (power of two)
 * @param pCoeff   twiddle table (interleaved I/Q, Q15)
 * @param lenCoeff number of complex entries in the twiddle table
 */
void radix4Fft(INT16* pSrc, INT16* pDst, int Nfft, const INT16* pCoeff, int lenCoeff){
    radix4FftStageOne(pSrc, pDst, Nfft);
    radix4FftMainStages(pSrc, pDst, Nfft, pCoeff, lenCoeff);
    radix4FftLastStage(pSrc, pDst, Nfft, pCoeff, lenCoeff);
}

// same as FFT but with inverse twiddle sign; result scaled down by 2 per stage
void radix4Ifft(INT16* pSrc, INT16* pDst, int Nfft, const INT16* pCoeff, int lenCoeff){
    radix4IfftStageOne(pSrc, pDst, Nfft);
    radix4IfftMainStages(pSrc, pDst, Nfft, pCoeff, lenCoeff);
    radix4IfftLastStage(pSrc, pDst, Nfft, pCoeff, lenCoeff);
}

#ifdef FFT_PARALLEL
/** @brief IFFT driver for the NUM_PARALLEL interleaved-stream data layout. */
void radix4IfftParallel(INT16* pSrc, INT16* pDst, int Nfft, const INT16* pCoeff, int lenCoeff)
{
    radix4IfftStageOneParallel(pSrc, pDst, Nfft);
    radix4IfftMainStagesParallel(pSrc, pDst, Nfft, pCoeff, lenCoeff);
    radix4IfftLastStageParallel(pSrc, pDst, Nfft, pCoeff, lenCoeff);
}
#endif

#ifdef FFT_STRIDE
/** @brief IFFT driver for strided input; stage one differs, later stages shared. */
void radix4IfftStride(INT16* pSrc, INT16* pDst, int Nfft, const INT16* pCoeff, int lenCoeff)
{
    radix4IfftStageOneStride(pSrc, pDst, Nfft);
    radix4IfftMainStages(pSrc, pDst, Nfft, pCoeff, lenCoeff);
    radix4IfftLastStage(pSrc, pDst, Nfft, pCoeff, lenCoeff);
}
#endif

// when defined, the radix-2 IFFT halves the result at every stage (1/N overall)
#define FFT_SCALE

/**
 * @brief In-place fixed-point radix-2 IFFT (bit-reversal permutation followed
 *        by log2(Nfft) radix-2 stages), operating on packed 32-bit I/Q words.
 */
void radix2Ifft(INT16* pBfr, int Nfft, const INT16* pCoeff, int lenCoeff){
    int ii, jj, mm, nn;
    int loop1, loop2, coeffStride;
    INT16 x1I, x1Q, W1I, W1Q;       // twiddled input and twiddle factor
    INT16 z0I, z0Q, z1I, z1Q;       // butterfly inputs after twiddle
    INT16 y0I, y0Q, y1I, y1Q;       // butterfly outputs
    unsigned int loadCoeff;
    unsigned int *coeffPtr;
    unsigned int loadTemp, storeTemp;
    unsigned int *loopPtr;
    unsigned int tempVal0, tempVal1;
    // one 32-bit word holds a complex sample: Q in the high half, I in the low half
    unsigned int *dataPtr = (unsigned int *)pBfr;
#if defined(ARM_GCC) || defined(ARM_DS5) || defined(ARM_966)
    int log2Nfft = 31-__clz(Nfft);
#else
    int log2Nfft = myMsb(Nfft);
#endif
    int
bitShift = IDX_LEN-log2Nfft; // bit-reversal permutation for(ii=0;ii>bitShift; if(jj > ii){ // swap [ii] for [jj] tempVal0 = dataPtr[jj]; tempVal1 = dataPtr[ii]; dataPtr[ii] = tempVal0; dataPtr[jj] = tempVal1; } } loop1 = Nfft>>1; loop2 = 1; // radix-2 stages coeffStride = lenCoeff>>1; for(ii=0;ii>16); for(nn=loop1;nn>0;nn--){ loadTemp = loopPtr[0]; z0I = (loadTemp&0xffff); z0Q = (loadTemp>>16); loadTemp = loopPtr[loop2]; x1I = (loadTemp&0xffff); x1Q = (loadTemp>>16); #ifdef ARM_DEBUG z1I = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulatorNeg( radix4Fft16BitTo32BitMultiplier(W1I, x1I), radix4Fft16BitTo32BitMultiplier(W1Q, x1Q))); // r6 z1Q = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulator( radix4Fft16BitTo32BitMultiplier(W1I, x1Q), radix4Fft16BitTo32BitMultiplier(W1Q, x1I))); y0I = radix4Fft16BitAddShift(z0I, z1I); y0Q = radix4Fft16BitAddShift(z0Q, z1Q); y1I = radix4Fft16BitSubtractShift(z0I, z1I); y1Q = radix4Fft16BitSubtractShift(z0Q, z1Q); #else z1I = ( (((INT32)W1I*x1I)-((INT32)W1Q*x1Q))>>15 ); z1Q = ( (((INT32)W1I*x1Q)+((INT32)W1Q*x1I))>>15 ); #ifdef FFT_SCALE y0I = (z0I+z1I)>>1; y0Q = (z0Q+z1Q)>>1; y1I = (z0I-z1I)>>1; y1Q = (z0Q-z1Q)>>1; #else y0I = (z0I+z1I); y0Q = (z0Q+z1Q); y1I = (z0I-z1I); y1Q = (z0Q-z1Q); #endif #endif storeTemp = (unsigned short)y0Q; storeTemp<<=16; storeTemp |=(unsigned short)y0I; *loopPtr = storeTemp; loopPtr+=loop2; storeTemp = (unsigned short)y1Q; storeTemp<<=16; storeTemp |=(unsigned short)y1I; *loopPtr = storeTemp; loopPtr+=loop2; } loopPtr += 1 - 2*loop2*loop1; } loop1 >>=1; loop2 <<=1; coeffStride>>=1; } } const float twiddleTableFlt[2 * MAX_FFT_FLT] = { 1.00000000f, 0.00000000f, 0.995184720f, 0.0980171412f, 0.980785251f, 0.195090324f, 0.956940353f, 0.290284663f, 0.923879504f, 0.382683456f, 0.881921232f, 0.471396744f, 0.831469595f, 0.555570245f, 0.773010433f, 0.634393334f, 0.707106769f, 0.707106769f, 0.634393275f, 0.773010433f, 0.555570185f, 0.831469655f, 0.471396655f, 0.881921291f, 0.382683426f, 0.923879504f, 
0.290284634f, 0.956940353f, 0.195090234f, 0.980785310f, 0.0980171338f, 0.995184720f, -4.37113883e-08f, 1.00000000f, -0.0980172232f, 0.995184720f, -0.195090324f, 0.980785251f, -0.290284723f, 0.956940293f, -0.382683516f, 0.923879504f, -0.471396834f, 0.881921232f, -0.555570364f, 0.831469536f, -0.634393275f, 0.773010492f, -0.707106769f, 0.707106769f, -0.773010492f, 0.634393275f, -0.831469655f, 0.555570185f, -0.881921351f, 0.471396625f, -0.923879623f, 0.382683277f, -0.956940353f, 0.290284723f, -0.980785310f, 0.195090309f, -0.995184720f, 0.0980170965f, -1.00000000f, -8.74227766e-08f, -0.995184720f, -0.0980172679f, -0.980785251f, -0.195090488f, -0.956940293f, -0.290284872f, -0.923879504f, -0.382683426f, -0.881921232f, -0.471396774f, -0.831469536f, -0.555570304f, -0.773010373f, -0.634393394f, -0.707106650f, -0.707106888f, -0.634393334f, -0.773010433f, -0.555570006f, -0.831469774f, -0.471396685f, -0.881921291f, -0.382683128f, -0.923879683f, -0.290284544f, -0.956940353f, -0.195090383f, -0.980785251f, -0.0980169326f, -0.995184720f, 1.19248806e-08f, -1.00000000f, 0.0980174318f, -0.995184720f, 0.195090413f, -0.980785251f, 0.290285021f, -0.956940234f, 0.382683605f, -0.923879445f, 0.471396714f, -0.881921291f, 0.555570424f, -0.831469476f, 0.634393334f, -0.773010433f, 0.707107008f, -0.707106531f, 0.773010552f, -0.634393156f, 0.831469595f, -0.555570304f, 0.881921351f, -0.471396536f, 0.923879564f, -0.382683426f, 0.956940413f, -0.290284395f, 0.980785310f, -0.195090234f, 0.995184779f, -0.0980167687f }; void radix2FftFlt(float* pBfr, int Nfft, const float* pCoeff, int lenCoeff) { int ii, jj, mm, nn; int loop1, loop2, coeffStride; float x1I, x1Q, W1I, W1Q; float z0I, z0Q, z1I, z1Q; float y0I, y0Q, y1I, y1Q; const float *coeffPtr; float *loopPtr; unsigned long long tempVal0, tempVal1; unsigned long long *dataPtr = (unsigned long long *)pBfr; #if defined(ARM_GCC) || defined(ARM_DS5) || defined(ARM_966) int log2Nfft = 31 - __clz(Nfft); #else int log2Nfft = myMsb(Nfft); #endif int bitShift = 
IDX_LEN - log2Nfft; // bit-reversal permutation for (ii = 0;ii> bitShift; if (jj > ii) { // swap [ii] for [jj] tempVal0 = dataPtr[jj]; tempVal1 = dataPtr[ii]; dataPtr[ii] = tempVal0; dataPtr[jj] = tempVal1; } } loop1 = Nfft >> 1; loop2 = 1; // radix-2 stages coeffStride = lenCoeff >> 1; for (ii = 0;ii0;nn--) { z0I = loopPtr[0]; z0Q = loopPtr[1]; x1I = loopPtr[2 * loop2]; x1Q = loopPtr[2 * loop2 + 1]; z1I = ((W1I*x1I) + (W1Q*x1Q)); z1Q = ((W1I*x1Q) - (W1Q*x1I)); y0I = (z0I + z1I); y0Q = (z0Q + z1Q); y1I = (z0I - z1I); y1Q = (z0Q - z1Q); loopPtr[0] = y0I; loopPtr[1] = y0Q; loopPtr += 2 * loop2; loopPtr[0] = y1I; loopPtr[1] = y1Q; loopPtr += 2 * loop2; } loopPtr += 2 - 4 * loop2*loop1; } loop1 >>= 1; loop2 <<= 1; coeffStride >>= 1; } } void radix2IfftFlt(float* pBfr, int Nfft, const float* pCoeff, int lenCoeff) { int ii, jj, mm, nn; int loop1, loop2, coeffStride; float x1I, x1Q, W1I, W1Q; float z0I, z0Q, z1I, z1Q; float y0I, y0Q, y1I, y1Q; const float *coeffPtr; float *loopPtr; unsigned long long tempVal0, tempVal1; unsigned long long *dataPtr = (unsigned long long *)pBfr; #if defined(ARM_GCC) || defined(ARM_DS5) || defined(ARM_966) int log2Nfft = 31 - __clz(Nfft); #else int log2Nfft = myMsb(Nfft); #endif int bitShift = IDX_LEN - log2Nfft; // bit-reversal permutation for (ii = 0;ii> bitShift; if (jj > ii) { // swap [ii] for [jj] tempVal0 = dataPtr[jj]; tempVal1 = dataPtr[ii]; dataPtr[ii] = tempVal0; dataPtr[jj] = tempVal1; } } loop1 = Nfft >> 1; loop2 = 1; // radix-2 stages coeffStride = lenCoeff >> 1; for (ii = 0;ii0;nn--) { z0I = loopPtr[0]; z0Q = loopPtr[1]; x1I = loopPtr[2 * loop2]; x1Q = loopPtr[2 * loop2 + 1]; z1I = ((W1I*x1I) - (W1Q*x1Q)); z1Q = ((W1I*x1Q) + (W1Q*x1I)); #ifdef FFT_SCALE y0I = (z0I + z1I)/2; y0Q = (z0Q + z1Q)/2; y1I = (z0I - z1I)/2; y1Q = (z0Q - z1Q)/2; #else y0I = (z0I + z1I); y0Q = (z0Q + z1Q); y1I = (z0I - z1I); y1Q = (z0Q - z1Q); #endif loopPtr[0] = y0I; loopPtr[1] = y0Q; loopPtr += 2*loop2; loopPtr[0] = y1I; loopPtr[1] = y1Q; loopPtr += 
2*loop2; } loopPtr += 2 - 4 * loop2*loop1; } loop1 >>= 1; loop2 <<= 1; coeffStride >>= 1; } } #ifdef FFT_STRIDE void radix2IfftStride(INT16* pBfr, int Nfft, const INT16* pCoeff, int lenCoeff) { int ii, jj, mm, nn; int loop1, loop2, coeffStride; INT16 x1I, x1Q, W1I, W1Q; INT16 z0I, z0Q, z1I, z1Q; INT16 y0I, y0Q, y1I, y1Q; unsigned int loadCoeff; unsigned int *coeffPtr; unsigned int loadTemp, storeTemp; unsigned int *loopPtr; unsigned int tempVal0, tempVal1; unsigned int *dataPtr = (unsigned int *)pBfr; #if defined(ARM_GCC) || defined(ARM_DS5) || defined(ARM_966) int log2Nfft = 31 - __clz(Nfft); #else int log2Nfft = myMsb(Nfft); #endif int bitShift = IDX_LEN - log2Nfft; // bit-reversal permutation for (ii = 0;ii> bitShift; if (jj > ii) { // swap [ii] for [jj] tempVal0 = dataPtr[NUM_PARALLEL*jj]; tempVal1 = dataPtr[NUM_PARALLEL*ii]; dataPtr[NUM_PARALLEL*ii] = tempVal0; dataPtr[NUM_PARALLEL*jj] = tempVal1; } } loop1 = Nfft >> 1; loop2 = 1; // radix-2 stages coeffStride = lenCoeff >> 1; for (ii = 0;ii> 16); for (nn = loop1;nn>0;nn--) { loadTemp = loopPtr[0]; z0I = (loadTemp & 0xffff); z0Q = (loadTemp >> 16); loadTemp = loopPtr[NUM_PARALLEL*loop2]; x1I = (loadTemp & 0xffff); x1Q = (loadTemp >> 16); #ifdef ARM_DEBUG z1I = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulatorNeg(radix4Fft16BitTo32BitMultiplier(W1I, x1I), radix4Fft16BitTo32BitMultiplier(W1Q, x1Q))); // r6 z1Q = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulator(radix4Fft16BitTo32BitMultiplier(W1I, x1Q), radix4Fft16BitTo32BitMultiplier(W1Q, x1I))); y0I = radix4Fft16BitAddShift(z0I, z1I); y0Q = radix4Fft16BitAddShift(z0Q, z1Q); y1I = radix4Fft16BitSubtractShift(z0I, z1I); y1Q = radix4Fft16BitSubtractShift(z0Q, z1Q); #else z1I = ((((INT32)W1I*x1I) - ((INT32)W1Q*x1Q)) >> 15); z1Q = ((((INT32)W1I*x1Q) + ((INT32)W1Q*x1I)) >> 15); #ifdef FFT_SCALE y0I = (z0I + z1I) >> 1; y0Q = (z0Q + z1Q) >> 1; y1I = (z0I - z1I) >> 1; y1Q = (z0Q - z1Q) >> 1; #else y0I = (z0I + z1I); y0Q = (z0Q + z1Q); y1I = (z0I - z1I); y1Q 
= (z0Q - z1Q); #endif #endif storeTemp = (unsigned short)y0Q; storeTemp <<= 16; storeTemp |= (unsigned short)y0I; *loopPtr = storeTemp; loopPtr += NUM_PARALLEL*loop2; storeTemp = (unsigned short)y1Q; storeTemp <<= 16; storeTemp |= (unsigned short)y1I; *loopPtr = storeTemp; loopPtr += NUM_PARALLEL*loop2; } loopPtr += NUM_PARALLEL * (1 - 2 * loop2*loop1); } loop1 >>= 1; loop2 <<= 1; coeffStride >>= 1; } } #endif #ifdef FFT_PARALLEL void radix2IfftParallel(INT16* pBfr, int Nfft, const INT16* pCoeff, int lenCoeff) { int ii, jj, qq, mm, nn; int loop1, loop2, coeffStride; INT16 x1I, x1Q, W1I, W1Q; INT16 z0I, z0Q, z1I, z1Q; INT16 y0I, y0Q, y1I, y1Q; unsigned int loadCoeff; unsigned int *coeffPtr; unsigned int loadTemp, storeTemp; unsigned int *loopPtr; unsigned int tempVal0, tempVal1; unsigned int *dataPtr = (unsigned int *)pBfr; #if defined(ARM_GCC) || defined(ARM_DS5) || defined(ARM_966) int log2Nfft = 31 - __clz(Nfft); #else int log2Nfft = myMsb(Nfft); #endif int bitShift = IDX_LEN - log2Nfft; // bit-reversal permutation for (ii = 0;ii> bitShift; if (jj > ii) { // swap [ii] for [jj] for (qq = 0;qq < NUM_PARALLEL;qq++) { tempVal0 = dataPtr[NUM_PARALLEL*jj + qq]; tempVal1 = dataPtr[NUM_PARALLEL*ii + qq]; dataPtr[NUM_PARALLEL*ii + qq] = tempVal0; dataPtr[NUM_PARALLEL*jj + qq] = tempVal1; } } } loop1 = Nfft >> 1; loop2 = 1; // radix-2 stages coeffStride = lenCoeff >> 1; for (ii = 0;ii> 16); for (nn = loop1;nn>0;nn--) { for (qq = 0;qq < NUM_PARALLEL;qq++) { loadTemp = loopPtr[qq]; z0I = (loadTemp & 0xffff); z0Q = (loadTemp >> 16); loadTemp = loopPtr[NUM_PARALLEL*loop2 + qq]; x1I = (loadTemp & 0xffff); x1Q = (loadTemp >> 16); z1I = ((((INT32)W1I*x1I) - ((INT32)W1Q*x1Q)) >> 15); z1Q = ((((INT32)W1I*x1Q) + ((INT32)W1Q*x1I)) >> 15); y0I = (z0I + z1I) >> 1; y0Q = (z0Q + z1Q) >> 1; y1I = (z0I - z1I) >> 1; y1Q = (z0Q - z1Q) >> 1; storeTemp = (unsigned short)y0Q; storeTemp <<= 16; storeTemp |= (unsigned short)y0I; loopPtr[qq] = storeTemp; storeTemp = (unsigned short)y1Q; storeTemp 
<<= 16; storeTemp |= (unsigned short)y1I;
                loopPtr[NUM_PARALLEL*loop2 + qq] = storeTemp;
            }
            loopPtr += 2*NUM_PARALLEL*loop2;
        }
        loopPtr += NUM_PARALLEL * (1 - 2 * loop2*loop1);
    }
    loop1 >>= 1; loop2 <<= 1; coeffStride >>= 1;
    }
}
#endif

// size 64 FFT with only four non-zero inputs, pruned down
/**
 * @brief Pruned 64-point FFT: only loadPtr[0..3] (four packed I/Q samples) are
 *        non-zero, so each of the 16 output groups reduces to a single
 *        twiddled radix-4 butterfly.
 *
 * For each of 16 iterations the four complex inputs are multiplied by the
 * twiddles W1/W2/W3 (W0 implicitly 1), combined in one radix-4 butterfly, and
 * the squared magnitude (I*I + Q*Q) >> 13 of each of the four outputs is
 * stored as an unsigned 16-bit value into fftOutBfr. Outputs are written with
 * stride 16 (pDst[0], pDst[16], pDst[2*16], pDst[3*16], then pDst++), so the
 * 64 results land in bit-reversed-group order; note the rotated placement
 * (y2 lands in bin 0) — appears to implement an fftshift-style reordering,
 * confirm against the caller.
 *
 * @param loadPtr   four packed 32-bit words, Q in high half, I in low half
 * @param fftOutBfr output buffer, 64 unsigned 16-bit magnitude values
 * @param pCoeff    twiddle table (interleaved I/Q, Q15)
 * @param lenCoeff  number of 16-bit entries; stride is lenCoeff>>6 per bin
 */
void radix4Fft4in64(unsigned int *loadPtr, unsigned int *fftOutBfr, const INT16* pCoeff, int lenCoeff){
    //int Nfft = 64;
    int mm, coeffStride = 0;
    int coeffStrideTemp = lenCoeff>>(2*3); // twiddle decimation for N = 64 = 4^3
    unsigned short *pDst = (unsigned short*)fftOutBfr;
#if defined(ARM_GCC) || defined(ARM_DS5)
    int coeffStrideDw; // byte offset between twiddle loads (see 4*coeffStride below)
    //int coeffStrideTemp = lenCoeff>>3;
    // packed complex inputs stay in registers for the whole loop
    unsigned int z0 = loadPtr[0];
    unsigned int x1 = loadPtr[1];
    unsigned int x2 = loadPtr[2];
    unsigned int x3 = loadPtr[3];
#ifdef ARM_DS5
    // NOTE(review): pCoeffTemp drops the const qualifier of pCoeff (armcc
    // inline-asm operand); harmless here since the table is only read.
    INT16* pCoeffTemp;
    int reg5, reg6, reg7, reg8, reg9;
#endif
#else
    INT16 *pSrc = (INT16*)loadPtr;
    INT16 x1I, x1Q, x2I, x2Q, x3I, x3Q; // x0I, x0Q,
    INT16 z0I, z0Q, z1I, z1Q, z2I, z2Q, z3I, z3Q;
    INT16 y0I, y0Q, y1I, y1Q, y2I, y2Q, y3I, y3Q;
    INT16 AI, AQ, BI, BQ, CI, CQ, DI, DQ;
    INT16 W1I, W1Q, W2I, W2Q, W3I, W3Q; // W0I, W0Q,
    // unpack the four complex inputs (I at even, Q at odd indices)
    z0I = pSrc[0]; z0Q = pSrc[1];
    x1I = pSrc[2]; x1Q = pSrc[2+1];
    x2I = pSrc[4]; x2Q = pSrc[4+1];
    x3I = pSrc[6]; x3Q = pSrc[6+1];
#endif
    for(mm=0;mm<16;mm++){
#if defined(ARM_GCC) || defined(ARM_DS5)
        coeffStrideDw = 4*coeffStride; // complex Q15 twiddle = 4 bytes
#ifdef ARM_GCC
        // SMUAD/SMUSDX pairs implement the Q15 complex multiplies, QADD16/QSUB16
        // the butterfly adds; SMUAD rX,rX,rX then forms I*I+Q*Q, and the ASR #13
        // matches the >>13 scaling of the C fallback. STRH post-increments by
        // #32 bytes = 16 uint16 slots, matching the pDst[16] output stride.
        asm volatile(
            "LDR r5, [%[pCoeff], %[coeffStrideDw]]! \n\t"
            "SMUAD r8, r5, %[x1] \n\t"
            "SMUSDX r9, r5, %[x1] \n\t"
            "LDR r5, [%[pCoeff], %[coeffStrideDw]]! \n\t"
            "LSL r9, r9, #1 \n\t"
            "PKHTB r6, r9, r8, ASR #15 \n\t"
            "SMUAD r8, r5, %[x2] \n\t"
            "SMUSDX r9, r5, %[x2] \n\t"
            "LDR r5, [%[pCoeff], %[coeffStrideDw]] \n\t"
            "LSL r9, r9, #1 \n\t"
            "PKHTB r7, r9, r8, ASR #15 \n\t"
            "SMUAD r8, r5, %[x3] \n\t"
            "SMUSDX r9, r5, %[x3] \n\t"
            "LSL r9, r9, #1 \n\t"
            "PKHTB r5, r9, r8, ASR #15 \n\t"
            "QADD16 r8, %[z0], r7 \n\t"
            "QSUB16 r9, %[z0], r7 \n\t"
            "QADD16 r7, r6, r5 \n\t"
            "QSUB16 r6, r6, r5 \n\t"
            "SHADD16 r5, r8, r7 \n\t"
            "SHSUB16 r7, r8, r7 \n\t"
            "SMUAD r5, r5, r5 \n\t"
            "SMUAD r7, r7, r7 \n\t"
            "ASR r5, r5, #13 \n\t"
            "ASR r7, r7, #13 \n\t"
            "SHASX r8, r9, r6 \n\t"
            "SHSAX r6, r9, r6 \n\t"
            "SMUAD r6, r6, r6 \n\t"
            "SMUAD r8, r8, r8 \n\t"
            "ASR r6, r6, #13 \n\t"
            "ASR r8, r8, #13 \n\t"
            "STRH r7, [%[pDst]], #32 \n\t"
            "STRH r8, [%[pDst]], #32 \n\t"
            "STRH r5, [%[pDst]], #32 \n\t"
            "STRH r6, [%[pDst]], #32 \n\t"
            : [pDst]"+r"(pDst)
            : [pCoeff]"r"(pCoeff), [coeffStrideDw]"r"(coeffStrideDw), [z0]"r"(z0), [x1]"r"(x1), [x2]"r"(x2), [x3]"r"(x3)
            : "r5", "r6", "r7", "r8", "r9");
#else
        // armcc (DS5) embedded-asm mirror of the GCC block above
        pCoeffTemp = pCoeff;
        __asm volatile {
            LDR reg5, [pCoeffTemp, coeffStrideDw]!
            SMUAD reg8, reg5, x1
            SMUSDX reg9, reg5, x1
            LDR reg5, [pCoeffTemp, coeffStrideDw]!
            LSL reg9, reg9, #1
            PKHTB reg6, reg9, reg8, ASR #15
            SMUAD reg8, reg5, x2
            SMUSDX reg9, reg5, x2
            LDR reg5, [pCoeffTemp, coeffStrideDw]
            LSL reg9, reg9, #1
            PKHTB reg7, reg9, reg8, ASR #15
            SMUAD reg8, reg5, x3
            SMUSDX reg9, reg5, x3
            LSL reg9, reg9, #1
            PKHTB reg5, reg9, reg8, ASR #15
            QADD16 reg8, z0, reg7
            QSUB16 reg9, z0, reg7
            QADD16 reg7, reg6, reg5
            QSUB16 reg6, reg6, reg5
            SHADD16 reg5, reg8, reg7
            SHSUB16 reg7, reg8, reg7
            SMUAD reg5, reg5, reg5
            SMUAD reg7, reg7, reg7
            ASR reg5, reg5, #13
            ASR reg7, reg7, #13
            SHASX reg8, reg9, reg6
            SHSAX reg6, reg9, reg6
            SMUAD reg6, reg6, reg6
            SMUAD reg8, reg8, reg8
            ASR reg6, reg6, #13
            ASR reg8, reg8, #13
            STRH reg7, [pDst], #32
            STRH reg8, [pDst], #32
            STRH reg5, [pDst], #32
            STRH reg6, [pDst], #32
        }
#endif
        // the four STRH post-increments advanced pDst by 4*32 bytes = 64
        // halfwords; rewind so the pDst++ below moves to the next bin
        pDst -= 4*16;
#else
        //W0I = pCoeff[0]; W0Q = pCoeff[1];
        W1I = pCoeff[2*coeffStride];  W1Q = pCoeff[2*coeffStride+1];
        W2I = pCoeff[4*coeffStride];  W2Q = pCoeff[4*coeffStride+1];
        W3I = pCoeff[6*coeffStride];  W3Q = pCoeff[6*coeffStride+1];
        // z0 = W0*x0, z1 = W1*x1, z2 = W2*x2, z3 = W3*x3 assuming W0Q = -exp(1i*2*pi*nk/N)
#ifdef ARM_DEBUG
        // bit-exact helper-function model of the assembly path
        z1I = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulator( radix4Fft16BitTo32BitMultiplier(W1I, x1I), radix4Fft16BitTo32BitMultiplier(W1Q, x1Q)));
        z1Q = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulatorNeg( radix4Fft16BitTo32BitMultiplier(W1I, x1Q), radix4Fft16BitTo32BitMultiplier(W1Q, x1I)));
        z2I = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulator( radix4Fft16BitTo32BitMultiplier(W2I, x2I), radix4Fft16BitTo32BitMultiplier(W2Q, x2Q)));
        z2Q = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulatorNeg( radix4Fft16BitTo32BitMultiplier(W2I, x2Q), radix4Fft16BitTo32BitMultiplier(W2Q, x2I)));
        z3I = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulator( radix4Fft16BitTo32BitMultiplier(W3I, x3I), radix4Fft16BitTo32BitMultiplier(W3Q, x3Q)));
        z3Q = radix4Fft32BitTo16BitRounding(radix4Fft32BitAccumulatorNeg( radix4Fft16BitTo32BitMultiplier(W3I, x3Q), radix4Fft16BitTo32BitMultiplier(W3Q, x3I)));
        AI = radix4Fft16BitAdd(z0I, z2I);      AQ = radix4Fft16BitAdd(z0Q, z2Q);
        BI = radix4Fft16BitSubtract(z0I, z2I); BQ = radix4Fft16BitSubtract(z0Q, z2Q);
        CI = radix4Fft16BitAdd(z1I, z3I);      CQ = radix4Fft16BitAdd(z1Q, z3Q);
        DI = radix4Fft16BitSubtract(z1I, z3I); DQ = radix4Fft16BitSubtract(z1Q, z3Q);
        y0I = radix4Fft16BitAddShift(AI, CI);      y0Q = radix4Fft16BitAddShift(AQ, CQ);
        y1I = radix4Fft16BitAddShift(BI, DQ);      y1Q = radix4Fft16BitSubtractShift(BQ, DI);
        y2I = radix4Fft16BitSubtractShift(AI, CI); y2Q = radix4Fft16BitSubtractShift(AQ, CQ);
        y3I = radix4Fft16BitSubtractShift(BI, DQ); y3Q = radix4Fft16BitAddShift(BQ, DI);
        pDst[2*16] = radix4Fft32BitTo16BitUnsignedRounding(radix4Fft32BitAccumulator( radix4Fft16BitTo32BitMultiplier(y0I, y0I), radix4Fft16BitTo32BitMultiplier(y0Q, y0Q)),-2);
        pDst[3*16] = radix4Fft32BitTo16BitUnsignedRounding(radix4Fft32BitAccumulator( radix4Fft16BitTo32BitMultiplier(y1I, y1I), radix4Fft16BitTo32BitMultiplier(y1Q, y1Q)),-2);
        pDst[0]    = radix4Fft32BitTo16BitUnsignedRounding(radix4Fft32BitAccumulator( radix4Fft16BitTo32BitMultiplier(y2I, y2I), radix4Fft16BitTo32BitMultiplier(y2Q, y2Q)),-2);
        pDst[16]   = radix4Fft32BitTo16BitUnsignedRounding(radix4Fft32BitAccumulator( radix4Fft16BitTo32BitMultiplier(y3I, y3I), radix4Fft16BitTo32BitMultiplier(y3Q, y3Q)),-2);
#else
        // Q15 complex multiplies by the twiddles (FFT sign convention)
        z1I = (INT16)( (((INT32)W1I*x1I)+((INT32)W1Q*x1Q))>>15 );
        z1Q = (INT16)( (((INT32)W1I*x1Q)-((INT32)W1Q*x1I))>>15 );
        z2I = (INT16)( (((INT32)W2I*x2I)+((INT32)W2Q*x2Q))>>15 );
        z2Q = (INT16)( (((INT32)W2I*x2Q)-((INT32)W2Q*x2I))>>15 );
        z3I = (INT16)( (((INT32)W3I*x3I)+((INT32)W3Q*x3Q))>>15 );
        z3Q = (INT16)( (((INT32)W3I*x3Q)-((INT32)W3Q*x3I))>>15 );
        // radix-4 butterfly, halved once at the output stage
        AI = (z0I+z2I); AQ = (z0Q+z2Q);
        BI = (z0I-z2I); BQ = (z0Q-z2Q);
        CI = (z1I+z3I); CQ = (z1Q+z3Q);
        DI = (z1I-z3I); DQ = (z1Q-z3Q);
        y0I = (AI+CI)>>1; y0Q = (AQ+CQ)>>1;
        y1I = (BI+DQ)>>1; y1Q = (BQ-DI)>>1;
        y2I = (AI-CI)>>1; y2Q = (AQ-CQ)>>1;
        y3I = (BI-DQ)>>1; y3Q = (BQ+DI)>>1;
        // squared magnitudes, scaled to fit 16 bits
        pDst[2*16] = (INT16) ( (((INT32)y0I*y0I)+((INT32)y0Q*y0Q))>>13 );
        pDst[3*16] = (INT16) ( (((INT32)y1I*y1I)+((INT32)y1Q*y1Q))>>13 );
        pDst[0]    = (INT16) ( (((INT32)y2I*y2I)+((INT32)y2Q*y2Q))>>13 );
        pDst[16]   = (INT16) ( (((INT32)y3I*y3I)+((INT32)y3Q*y3Q))>>13 );
#endif
#endif
        //printf("%d | %d | %d | %d\n",pDst[0],pDst[16],pDst[2*16],pDst[3*16]);
        //printf("%d\n",(unsigned int)pDst);
        pDst ++;
        coeffStride += coeffStrideTemp;
    }
}
#endif /* CONFIG_WLS_CSI_PROC */