Index: lib/builtins/clear_cache.c
===================================================================
--- lib/builtins/clear_cache.c
+++ lib/builtins/clear_cache.c
@@ -19,6 +19,38 @@
 
 #if defined(__ANDROID__) && defined(__mips__)
   #include <sys/cachectl.h>
+  #include <sys/syscall.h>
+  #ifdef __LP64__
+    /*
+     * clear_mips_cache - Invalidates instruction cache for Mips.
+     */
+    void clear_mips_cache(const void* Addr, size_t Size);
+    asm(
+        ".text\n"
+        ".align 2\n"
+        ".globl clear_mips_cache\n"
+        "clear_mips_cache:\n"
+        ".set noreorder\n"
+        "beq $a1, $zero, 20f\n"          /* If size == 0, branch around. */
+        "nop\n"
+        "daddu $a1, $a0, $a1\n"          /* Calculate end address + 1 */
+        "rdhwr $v0, $1\n"                /* Get step size for SYNCI */
+                                         /* $1 is $HW_SYNCI_Step */
+        "beq $v0, $zero, 20f\n"          /* If no caches require synchronization, */
+                                         /* branch around. */
+        "nop\n"
+        "10:\n"
+        "synci 0($a0)\n"                 /* Synchronize all caches around address. */
+        "daddu $a0, $a0, $v0\n"          /* Add step size. */
+        "sltu $v1, $a0, $a1\n"           /* Compare current with end address. */
+        "bne $v1, $zero, 10b\n"          /* Branch if more to do. */
+        "nop\n"
+        "sync\n"                         /* Clear memory hazards. */
+        "20:\n"
+        "jr.hb $ra\n"                    /* Return, clearing instruction hazards. */
+        "nop\n"
+    );
+  #endif
 #endif
 
 #if defined(__ANDROID__) && defined(__arm__)
@@ -62,7 +94,17 @@
 #elif defined(__ANDROID__) && defined(__mips__)
     const uintptr_t start_int = (uintptr_t) start;
     const uintptr_t end_int = (uintptr_t) end;
-    _flush_cache(start, (end_int - start_int), BCACHE);
+  #ifdef __LP64__
+    // Call synci implementation for short address range.
+    const uintptr_t address_range_limit = 256;
+    if ((end_int - start_int) <= address_range_limit) {
+        clear_mips_cache(start, (end_int - start_int));
+    } else {
+        syscall(__NR_cacheflush, start, (end_int - start_int), BCACHE);
+    }
+  #else
+    syscall(__NR_cacheflush, start, (end_int - start_int), BCACHE);
+  #endif
 #elif defined(__aarch64__) && !defined(__APPLE__)
     uint64_t xstart = (uint64_t)(uintptr_t) start;
     uint64_t xend = (uint64_t)(uintptr_t) end;