/*
 * Test for the Xtensa LoadStore exception handlers.
 *
 * When CONFIG_ESP32_IRAM_AS_8BIT_ACCESSIBLE_MEMORY is enabled, byte/halfword
 * and unaligned accesses to IRAM (which the hardware only supports as aligned
 * 32-bit accesses) trap into the LoadStoreError / LoadStoreAlignment exception
 * handlers, which emulate the access in software. This test performs unaligned
 * loads and stores at 32-bit aligned base addresses inside an IRAM buffer and
 * verifies the emulation stores exactly the intended bytes and nothing
 * adjacent.
 */

#include <esp_types.h>
#include <stdio.h>
#include <esp_heap_caps.h>
#include "sdkconfig.h"
#include "esp_system.h"
#include "unity.h"

#if CONFIG_IDF_TARGET_ARCH_XTENSA
#include "freertos/xtensa_api.h"

#ifdef CONFIG_ESP32_IRAM_AS_8BIT_ACCESSIBLE_MEMORY
TEST_CASE("LoadStore Exception handler", "[freertos]")
{
    // 32-bit reference patterns; the low byte / low halfword of each is
    // reused for the narrower access tests below.
    int32_t val0 = 0xDEADBEEF;
    int32_t val1 = 0xBBAA9988;
    int32_t val2 = 0x77665544;
    int32_t val3 = 0x33221100;

    int8_t val8_0 = val0 & 0xff;
    int8_t val8_1 = val1 & 0xff;
    int8_t val8_2 = val2 & 0xff;
    int8_t val8_3 = val3 & 0xff;

    int16_t val16_0 = val0 & 0xffff;
    int16_t val16_1 = val1 & 0xffff;
    int16_t val16_2 = val2 & 0xffff;
    int16_t val16_3 = val3 & 0xffff;

    // Use the largest available chunk of byte-accessible IRAM as the test
    // arena, so random offsets cover as much of it as possible.
    uint32_t largest_free = heap_caps_get_largest_free_block(MALLOC_CAP_IRAM_8BIT);

    int8_t *arr = heap_caps_malloc(largest_free * sizeof(int8_t), MALLOC_CAP_IRAM_8BIT);
    TEST_ASSERT(arr != NULL);

    // Three typed views of the same buffer, for byte, halfword and word
    // accesses respectively.
    int8_t *arr8 = arr;
    int16_t *arr16 = (int16_t *)arr;
    int32_t *arr32 = (int32_t *)arr;

    for (int i = 0; i < 1024; i++) {

        // LoadStoreError

        // Random base offset; the "- 20" keeps headroom for the widest
        // access pattern below (up to base + 15 bytes).
        uint32_t offset = esp_random() % (largest_free - 20);
        uint32_t offset8, offset16, offset32;

        // Get word aligned offset
        offset8 = offset & ~3;
        offset16 = offset8 / 2;
        offset32 = offset8 / 4;

        // Byte stores to IRAM: each traps (LoadStoreError) and is emulated
        // by the exception handler.
        arr8[offset8] = val8_0;
        arr8[offset8+1] = val8_1;
        arr8[offset8+2] = val8_2;
        arr8[offset8+3] = val8_3;

        // Just to make sure compiler doesn't read stale data
        asm volatile("memw\n");
        TEST_ASSERT_EQUAL(val8_0, arr8[offset8]);
        TEST_ASSERT_EQUAL(val8_1, arr8[offset8+1]);
        TEST_ASSERT_EQUAL(val8_2, arr8[offset8+2]);
        TEST_ASSERT_EQUAL(val8_3, arr8[offset8+3]);

        // Aligned halfword stores: also narrower than the native 32-bit
        // access width, so these are emulated too.
        arr16[offset16] = val16_0;
        arr16[offset16+1] = val16_1;
        arr16[offset16+2] = val16_2;
        arr16[offset16+3] = val16_3;

        // Just to make sure compiler doesn't read stale data
        asm volatile("memw\n");
        TEST_ASSERT_EQUAL(val16_0, arr16[offset16]);
        TEST_ASSERT_EQUAL(val16_1, arr16[offset16+1]);
        TEST_ASSERT_EQUAL(val16_2, arr16[offset16+2]);
        TEST_ASSERT_EQUAL(val16_3, arr16[offset16+3]);

        // LoadStoreAlignement Error

        // Check that it doesn't write to adjacent bytes.
        // Byte markers at +0, +5, +10, +15 sit between the unaligned targets
        // below and must survive every emulated store untouched.
        // NOTE: arithmetic on (void *) advances by single bytes (GNU
        // extension), which is how the odd byte offsets are formed.
        int8_t *ptr8_0 = (void *)&arr8[offset8];
        int8_t *ptr8_1 = (void *)&arr8[offset8] + 5;
        int8_t *ptr8_2 = (void *)&arr8[offset8] + 10;
        int8_t *ptr8_3 = (void *)&arr8[offset8] + 15;

        *ptr8_0 = 0x73;
        *ptr8_1 = 0x73;
        *ptr8_2 = 0x73;
        *ptr8_3 = 0x73;

        // Halfword stores at odd byte offsets (+1, +3): misaligned, so each
        // raises a LoadStoreAlignment exception and is emulated.
        int16_t *ptr16_0 = (void *)&arr16[offset16] + 1;
        int16_t *ptr16_1 = (void *)&arr16[offset16] + 3;

        *ptr16_0 = val16_0;
        *ptr16_1 = val16_1;

        // Just to make sure compiler doesn't read stale data
        asm volatile("memw\n");
        TEST_ASSERT_EQUAL(val16_0, *ptr16_0);
        TEST_ASSERT_EQUAL(0x73, *ptr8_0);
        TEST_ASSERT_EQUAL(val16_1, *ptr16_1);
        TEST_ASSERT_EQUAL(0x73, *ptr8_1);

        // Misaligned word stores at byte offsets +1, +6, +11. The +1 store
        // deliberately overwrites the halfwords written above (bytes 1-4),
        // which is why only the word values and the byte markers are
        // re-checked afterwards.
        int32_t *ptr32_0 = (void *)&arr32[offset32] + 1;
        int32_t *ptr32_1 = (void *)&arr32[offset32] + 6;
        int32_t *ptr32_2 = (void *)&arr32[offset32] + 11;
        *ptr32_0 = val0;
        *ptr32_1 = val1;
        *ptr32_2 = val2;

        // Just to make sure compiler doesn't read stale data
        asm volatile ("memw");
        TEST_ASSERT_EQUAL(0x73, *ptr8_0);
        TEST_ASSERT_EQUAL(val0, *ptr32_0);
        TEST_ASSERT_EQUAL(0x73, *ptr8_1);
        TEST_ASSERT_EQUAL(val1, *ptr32_1);
        TEST_ASSERT_EQUAL(0x73, *ptr8_2);
        TEST_ASSERT_EQUAL(val2, *ptr32_2);
        TEST_ASSERT_EQUAL(0x73, *ptr8_3);
    }

    // The emulated stores must not have corrupted heap metadata anywhere.
    TEST_ASSERT_TRUE(heap_caps_check_integrity_all(true));
    heap_caps_free(arr);
}
#endif // CONFIG_ESP32_IRAM_AS_8BIT_ACCESSIBLE_MEMORY
#endif // CONFIG_IDF_TARGET_ARCH_XTENSA