Index: lib/builtins/clear_cache.c
===================================================================
--- lib/builtins/clear_cache.c
+++ lib/builtins/clear_cache.c
@@ -93,24 +93,33 @@
 #elif defined(__aarch64__) && !defined(__APPLE__)
   uint64_t xstart = (uint64_t)(uintptr_t)start;
   uint64_t xend = (uint64_t)(uintptr_t)end;
-  uint64_t addr;
 
-  // Get Cache Type Info
+  // Get Cache Type Info.
   uint64_t ctr_el0;
   __asm __volatile("mrs %0, ctr_el0" : "=r"(ctr_el0));
 
-  // dc & ic instructions must use 64bit registers so we don't use
+  // The DC and IC instructions must use 64-bit registers so we don't use
   // uintptr_t in case this runs in an IPL32 environment.
-  const size_t dcache_line_size = 4 << ((ctr_el0 >> 16) & 15);
-  for (addr = xstart & ~(dcache_line_size - 1); addr < xend;
-       addr += dcache_line_size)
-    __asm __volatile("dc cvau, %0" ::"r"(addr));
+  uint64_t addr;
+
+  // If CTR_EL0.IDC is set, data cache cleaning to the point of unification
+  // is not required for instruction to data coherence.
+  if (((ctr_el0 >> 28) & 0x1) == 0x0) {
+    const size_t dcache_line_size = 4 << ((ctr_el0 >> 16) & 15);
+    for (addr = xstart & ~(dcache_line_size - 1); addr < xend;
+         addr += dcache_line_size)
+      __asm __volatile("dc cvau, %0" ::"r"(addr));
+  }
   __asm __volatile("dsb ish");
 
-  const size_t icache_line_size = 4 << ((ctr_el0 >> 0) & 15);
-  for (addr = xstart & ~(icache_line_size - 1); addr < xend;
-       addr += icache_line_size)
-    __asm __volatile("ic ivau, %0" ::"r"(addr));
+  // If CTR_EL0.DIC is set, instruction cache invalidation to the point of
+  // unification is not required for instruction to data coherence.
+  if (((ctr_el0 >> 29) & 0x1) == 0x0) {
+    const size_t icache_line_size = 4 << ((ctr_el0 >> 0) & 15);
+    for (addr = xstart & ~(icache_line_size - 1); addr < xend;
+         addr += icache_line_size)
+      __asm __volatile("ic ivau, %0" ::"r"(addr));
+  }
   __asm __volatile("isb sy");
 #elif defined(__powerpc64__)
   const size_t line_size = 32;
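
A minimal caller-side sketch of how the patched routine is exercised (the names run_jit_buffer and jit_fn are hypothetical and not part of this patch; the extern declaration assumes the compiler-rt entry point void __clear_cache(void *start, void *end) shown in this file): code that rewrites an executable buffer calls __clear_cache over the modified range before branching to it, and on AArch64 the function above then performs the DC CVAU / DSB ISH / IC IVAU / ISB SY sequence, skipping whichever maintenance step CTR_EL0.IDC or CTR_EL0.DIC marks as unnecessary.

    /* Hypothetical caller of the builtin patched above. */
    #include <stddef.h>

    extern void __clear_cache(void *start, void *end);

    typedef int (*jit_fn)(void);

    static int run_jit_buffer(void *buf, size_t len) {
      /* ... instructions have just been written into buf ... */

      /* Make the written range coherent with the instruction stream
       * before executing it. */
      __clear_cache(buf, (char *)buf + len);
      return ((jit_fn)buf)();
    }

The point of the patch is visible from this call path: on cores that report IDC or DIC in CTR_EL0, the corresponding per-line DC CVAU or IC IVAU loop is skipped entirely, so frequent callers such as JITs pay only for the barriers.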