Update ARM and x86 ArchHandler to check switch completeness. NFC
Define an explicit type for arch specific reference kinds to make sure all
cases are covered in switch statements.

llvm-svn: 229314
parent 5a6375c3ba
commit 9c222630e4
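Background for the change below: the completeness check comes from clang's -Wswitch warnings, which only fire when the controlling expression of a switch has enumeration type. Kind values travel as a plain integer (Reference::KindValue), so the diff gives each architecture's kinds a named enum and casts back to it at every switch. A minimal sketch of the pattern, assuming made-up names (ExampleKinds, applyFixup, and the enumerators are illustrative, not the actual lld declarations):

    // Sketch only: why the cast to a named enum matters.
    #include <cstdint>

    using KindValue = uint32_t;          // stand-in for Reference::KindValue

    enum ExampleKinds : KindValue {      // named type, like Arm_Kinds / X86_Kinds
      invalid,
      modeCode,
      branch32,
    };

    void applyFixup(KindValue kind) {
      // Switching over the raw integer gets no completeness checking.
      // Casting back to the named enum lets -Wswitch warn if an enumerator
      // is added later but not handled here.
      switch (static_cast<ExampleKinds>(kind)) {
      case invalid:
        break;
      case modeCode:
        break;
      case branch32:
        break;
      }
    }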
@@ -138,7 +138,7 @@ private:
   static const Registry::KindStrings _sKindStrings[];
   static const StubInfo _sStubInfoArmPIC;
 
-  enum : Reference::KindValue {
+  enum Arm_Kinds : Reference::KindValue {
     invalid,               /// for error condition
 
     modeThumbCode,         /// Content starting at this offset is thumb.
@@ -207,6 +207,7 @@ ArchHandler_arm::ArchHandler_arm() { }
 ArchHandler_arm::~ArchHandler_arm() { }
 
 const Registry::KindStrings ArchHandler_arm::_sKindStrings[] = {
+  LLD_KIND_STRING_ENTRY(invalid),
   LLD_KIND_STRING_ENTRY(modeThumbCode),
   LLD_KIND_STRING_ENTRY(modeArmCode),
   LLD_KIND_STRING_ENTRY(modeData),
@@ -912,7 +913,7 @@ void ArchHandler_arm::applyFixupFinal(const Reference &ref, uint8_t *loc,
   int32_t displacement;
   uint16_t value16;
   uint32_t value32;
-  switch (ref.kindValue()) {
+  switch (static_cast<Arm_Kinds>(ref.kindValue())) {
   case modeThumbCode:
     thumbMode = true;
     break;
@@ -1068,13 +1069,16 @@ void ArchHandler_arm::applyFixupRelocatable(const Reference &ref, uint8_t *loc,
                                             uint64_t inAtomAddress,
                                             bool &thumbMode,
                                             bool targetIsThumb) {
+  if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
+    return;
+  assert(ref.kindArch() == Reference::KindArch::ARM);
   bool useExternalReloc = useExternalRelocationTo(*ref.target());
   ulittle32_t *loc32 = reinterpret_cast<ulittle32_t *>(loc);
   int32_t displacement;
   uint16_t value16;
   uint32_t value32;
   bool targetIsUndef = isa<UndefinedAtom>(ref.target());
-  switch (ref.kindValue()) {
+  switch (static_cast<Arm_Kinds>(ref.kindValue())) {
   case modeThumbCode:
     thumbMode = true;
     break;
@@ -1168,7 +1172,7 @@ void ArchHandler_arm::applyFixupRelocatable(const Reference &ref, uint8_t *loc,
   case lazyImmediateLocation:
     // do nothing
     break;
-  default:
+  case invalid:
     llvm_unreachable("invalid ARM Reference Kind");
     break;
   }
@@ -1190,11 +1194,10 @@ void ArchHandler_arm::appendSectionRelocations(
   uint32_t targetAtomAddress;
   uint32_t fromAtomAddress;
   uint16_t other16;
-  switch (ref.kindValue()) {
+  switch (static_cast<Arm_Kinds>(ref.kindValue())) {
   case modeThumbCode:
   case modeArmCode:
   case modeData:
-    break;
+    // Do nothing.
+    break;
   case thumb_b22:
@@ -1382,7 +1385,7 @@ void ArchHandler_arm::appendSectionRelocations(
   case lazyImmediateLocation:
     // do nothing
     break;
-  default:
+  case invalid:
     llvm_unreachable("invalid ARM Reference Kind");
     break;
   }

@@ -133,7 +133,7 @@ private:
   static const Registry::KindStrings _sKindStrings[];
   static const StubInfo _sStubInfo;
 
-  enum : Reference::KindValue {
+  enum X86_Kinds : Reference::KindValue {
     invalid,               /// for error condition
 
     modeCode,              /// Content starting at this offset is code.
@@ -441,7 +441,7 @@ void ArchHandler_x86::applyFixupFinal(const Reference &ref, uint8_t *loc,
     return;
   assert(ref.kindArch() == Reference::KindArch::x86);
   ulittle32_t *loc32 = reinterpret_cast<ulittle32_t *>(loc);
-  switch (ref.kindValue()) {
+  switch (static_cast<X86_Kinds>(ref.kindValue())) {
   case branch32:
     *loc32 = (targetAddress - (fixupAddress + 4)) + ref.addend();
     break;
@@ -469,7 +469,7 @@ void ArchHandler_x86::applyFixupFinal(const Reference &ref, uint8_t *loc,
   case lazyImmediateLocation:
     *loc32 = ref.addend();
     break;
-  default:
+  case invalid:
     llvm_unreachable("invalid x86 Reference Kind");
     break;
   }
@@ -480,10 +480,13 @@ void ArchHandler_x86::applyFixupRelocatable(const Reference &ref,
                                             uint64_t fixupAddress,
                                             uint64_t targetAddress,
                                             uint64_t inAtomAddress) {
+  if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
+    return;
+  assert(ref.kindArch() == Reference::KindArch::x86);
   bool useExternalReloc = useExternalRelocationTo(*ref.target());
   ulittle16_t *loc16 = reinterpret_cast<ulittle16_t *>(loc);
   ulittle32_t *loc32 = reinterpret_cast<ulittle32_t *>(loc);
-  switch (ref.kindValue()) {
+  switch (static_cast<X86_Kinds>(ref.kindValue())) {
   case branch32:
     if (useExternalReloc)
       *loc32 = ref.addend() - (fixupAddress + 4);
@@ -515,7 +518,7 @@ void ArchHandler_x86::applyFixupRelocatable(const Reference &ref,
   case lazyImmediateLocation:
     // do nothing
     break;
-  default:
+  case invalid:
     llvm_unreachable("invalid x86 Reference Kind");
     break;
   }
@@ -556,7 +559,7 @@ void ArchHandler_x86::appendSectionRelocations(
   assert(ref.kindArch() == Reference::KindArch::x86);
   uint32_t sectionOffset = atomSectionOffset + ref.offsetInAtom();
   bool useExternalReloc = useExternalRelocationTo(*ref.target());
-  switch (ref.kindValue()) {
+  switch (static_cast<X86_Kinds>(ref.kindValue())) {
   case modeCode:
   case modeData:
     break;
@@ -624,10 +627,9 @@ void ArchHandler_x86::appendSectionRelocations(
   case lazyImmediateLocation:
     llvm_unreachable("lazy reference kind implies Stubs pass was run");
     break;
-  default:
+  case invalid:
     llvm_unreachable("unknown x86 Reference Kind");
     break;
   }
 }

@@ -475,11 +475,11 @@ void ArchHandler_x86_64::applyFixupFinal(
     const Reference &ref, uint8_t *loc, uint64_t fixupAddress,
     uint64_t targetAddress, uint64_t inAtomAddress, uint64_t imageBaseAddress,
     FindAddressForAtom findSectionAddress) {
-  ulittle32_t *loc32 = reinterpret_cast<ulittle32_t *>(loc);
-  ulittle64_t *loc64 = reinterpret_cast<ulittle64_t *>(loc);
   if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
     return;
   assert(ref.kindArch() == Reference::KindArch::x86_64);
+  ulittle32_t *loc32 = reinterpret_cast<ulittle32_t *>(loc);
+  ulittle64_t *loc64 = reinterpret_cast<ulittle64_t *>(loc);
   switch (static_cast<X86_64_Kinds>(ref.kindValue())) {
   case branch32:
   case ripRel32:
@@ -548,6 +548,9 @@ void ArchHandler_x86_64::applyFixupRelocatable(const Reference &ref,
                                                uint64_t fixupAddress,
                                                uint64_t targetAddress,
                                                uint64_t inAtomAddress) {
+  if (ref.kindNamespace() != Reference::KindNamespace::mach_o)
+    return;
+  assert(ref.kindArch() == Reference::KindArch::x86_64);
   ulittle32_t *loc32 = reinterpret_cast<ulittle32_t *>(loc);
   ulittle64_t *loc64 = reinterpret_cast<ulittle64_t *>(loc);
   switch (static_cast<X86_64_Kinds>(ref.kindValue())) {
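A note on the default: labels that become case invalid: above: a default label covers every possible value, so -Wswitch has nothing to warn about even when an enumerator is left unhandled, and LLVM's -Wcovered-switch-default flag additionally complains about a default in a fully covered switch. Spelling out the invalid case keeps the unreachable guard while preserving both checks. A hedged sketch (DemoKinds and handle are illustrative names, not lld code):

    #include "llvm/Support/ErrorHandling.h"  // llvm_unreachable

    enum DemoKinds : unsigned { invalid, branch32 };

    void handle(DemoKinds k) {
      switch (k) {
      case branch32:
        break;
      case invalid:                          // was: default:
        llvm_unreachable("invalid Reference Kind");
      }
      // Adding a new enumerator to DemoKinds now triggers -Wswitch here
      // until a matching case is written.
    }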