diff --git a/lld/wasm/InputChunks.cpp b/lld/wasm/InputChunks.cpp
--- a/lld/wasm/InputChunks.cpp
+++ b/lld/wasm/InputChunks.cpp
@@ -105,8 +105,13 @@
       llvm_unreachable("unknown relocation type");
     }
 
-    if (bytesRead && bytesRead != paddedLEBWidth)
-      warn("expected LEB at relocation site be 5/10-byte padded");
+    if (rel.Type == R_WASM_TABLE_NUMBER_LEB) {
+      if (bytesRead < 1 || bytesRead > 5)
+        warn("expected LEB at relocation site be 1-5 bytes long");
+    } else {
+      if (bytesRead && bytesRead != paddedLEBWidth)
+        warn("expected LEB at relocation site be 5/10-byte padded");
+    }
 
     if (rel.Type != R_WASM_GLOBAL_INDEX_LEB &&
         rel.Type != R_WASM_GLOBAL_INDEX_I32) {
@@ -153,9 +158,14 @@
     case R_WASM_GLOBAL_INDEX_LEB:
     case R_WASM_EVENT_INDEX_LEB:
     case R_WASM_MEMORY_ADDR_LEB:
-    case R_WASM_TABLE_NUMBER_LEB:
       encodeULEB128(value, loc, 5);
       break;
+    case R_WASM_TABLE_NUMBER_LEB: {
+      uint32_t bytesRead = 0;
+      decodeULEB128(loc, &bytesRead);
+      encodeULEB128(value, loc, bytesRead);
+      break;
+    }
     case R_WASM_MEMORY_ADDR_LEB64:
       encodeULEB128(value, loc, 10);
       break;
@@ -254,7 +264,6 @@
   case R_WASM_GLOBAL_INDEX_LEB:
   case R_WASM_EVENT_INDEX_LEB:
   case R_WASM_MEMORY_ADDR_LEB:
-  case R_WASM_TABLE_NUMBER_LEB:
   case R_WASM_TABLE_INDEX_SLEB:
   case R_WASM_MEMORY_ADDR_SLEB:
     return 5;
@@ -272,6 +281,21 @@
   return writeCompressedReloc(buf, rel, value);
 }
 
+static bool relocHasPaddedWidth(const WasmRelocation &rel) {
+  return rel.Type != R_WASM_TABLE_NUMBER_LEB;
+}
+
+static uint32_t getRelocInputWidth(const uint8_t *buf,
+                                   const WasmRelocation &rel) {
+  if (relocHasPaddedWidth(rel)) {
+    return getRelocWidthPadded(rel);
+  } else {
+    uint32_t relocSize = 0;
+    decodeULEB128(buf + rel.Offset, &relocSize);
+    return relocSize;
+  }
+}
+
 // Relocations of type LEB and SLEB in the code section are padded to 5 bytes
 // so that a fast linker can blindly overwrite them without needing to worry
 // about the number of bytes needed to encode the values.
@@ -303,7 +327,7 @@
     LLVM_DEBUG(dbgs() << " region: " << (rel.Offset - lastRelocEnd) << "\n");
     compressedFuncSize += rel.Offset - lastRelocEnd;
     compressedFuncSize += getRelocWidth(rel, file->calcNewValue(rel, tombstone));
-    lastRelocEnd = rel.Offset + getRelocWidthPadded(rel);
+    lastRelocEnd = rel.Offset + getRelocInputWidth(secStart, rel);
   }
   LLVM_DEBUG(dbgs() << " final region: " << (end - lastRelocEnd) << "\n");
   compressedFuncSize += end - lastRelocEnd;
@@ -344,7 +368,7 @@
     memcpy(buf, lastRelocEnd, chunkSize);
     buf += chunkSize;
     buf += writeCompressedReloc(buf, rel, file->calcNewValue(rel, tombstone));
-    lastRelocEnd = secStart + rel.Offset + getRelocWidthPadded(rel);
+    lastRelocEnd = secStart + rel.Offset + getRelocInputWidth(secStart, rel);
   }
 
   unsigned chunkSize = end - lastRelocEnd;
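
The change above hinges on one idea: for R_WASM_TABLE_NUMBER_LEB the linker no longer assumes a 5-byte padded LEB at the relocation site. It first decodes the existing LEB to learn how many bytes it occupies, then re-encodes the resolved value padded to exactly that width. Below is a minimal standalone sketch of that width-preserving rewrite. It reimplements the two LEB128 helpers locally (mirroring the behaviour of encodeULEB128/decodeULEB128 from llvm/Support/LEB128.h) so the example compiles on its own; the buffer contents and values are made up for illustration.

// Standalone sketch of the width-preserving ULEB128 rewrite used for
// R_WASM_TABLE_NUMBER_LEB above. Helper names mirror LLVM's LEB128.h but
// are reimplemented here so the example is self-contained.
#include <cassert>
#include <cstdint>
#include <cstdio>

// Encode `value` as ULEB128 into `p`, adding 0x80 continuation bytes so that
// at least `padTo` bytes are written. Returns the number of bytes written.
static unsigned encodeULEB128(uint64_t value, uint8_t *p, unsigned padTo = 0) {
  unsigned count = 0;
  do {
    uint8_t byte = value & 0x7f;
    value >>= 7;
    count++;
    if (value != 0 || count < padTo)
      byte |= 0x80; // more bytes follow
    *p++ = byte;
  } while (value != 0);
  // Pad with 0x80 continuations, ending in a final 0x00 byte.
  while (count < padTo) {
    *p++ = (count + 1 < padTo) ? 0x80 : 0x00;
    count++;
  }
  return count;
}

// Decode a ULEB128 from `p`, reporting how many bytes it spanned.
static uint64_t decodeULEB128(const uint8_t *p, unsigned *bytesRead) {
  uint64_t value = 0;
  unsigned shift = 0, count = 0;
  uint8_t byte;
  do {
    byte = p[count++];
    value |= uint64_t(byte & 0x7f) << shift;
    shift += 7;
  } while (byte & 0x80);
  *bytesRead = count;
  return value;
}

int main() {
  // Pretend this is a relocation site where the producer emitted table
  // number 3 as an unpadded, 1-byte LEB.
  uint8_t site[5] = {0};
  unsigned origWidth = encodeULEB128(3, site);
  assert(origWidth == 1);

  // Width-preserving rewrite: learn the existing width, then re-encode the
  // resolved value padded to exactly that many bytes, leaving the bytes
  // after the relocation site untouched.
  unsigned bytesRead = 0;
  decodeULEB128(site, &bytesRead);
  unsigned newWidth = encodeULEB128(7, site, bytesRead);
  assert(newWidth == bytesRead);

  unsigned check = 0;
  printf("rewritten value = %llu in %u byte(s)\n",
         (unsigned long long)decodeULEB128(site, &check), check);
  return 0;
}

Note that padTo is a minimum, not a maximum, so this rewrite stays in place only when the resolved value still fits in the width the producer emitted; the widened 1-5 byte warning in the first hunk reflects that the input LEB may be anywhere from unpadded to fully padded.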