path: root/bfd/elf-m10300.c
author    Nick Clifton <nickc@redhat.com>  2007-11-21 12:06:26 +0000
committer Nick Clifton <nickc@redhat.com>  2007-11-21 12:06:26 +0000
commit    1c3ae938389de1cfd57a359ffa0f079d84a1ffbb (patch)
tree      5ee6a238dad31848bc3556a44c2f60b75319a8c9 /bfd/elf-m10300.c
parent    370828a8b95aec1163dc4303d47f3f4d7f70e6a0 (diff)
* elf-m10300.c (mn10300_elf_relax_section): Allow for alignment relocs
when computing whether instructions can be relaxed.
* ld-mn10300/i135409-4.s: New test case.  Check for relaxation to a
16-bit jump instruction.
* ld-mn10300/i135409-4.t: Linker script for the new test.
* ld-mn10300/i135409-4.d: Expected disassembly of new test.
* ld-mn10300/mn10300.exp: Run the new test.
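For context, a small self-contained sketch (not part of the commit; the function
names, stand-in types and sample inputs are hypothetical) of the two pieces of
arithmetic the patch introduces: the worst-case slack a single R_MN10300_ALIGN
reloc can add to an alignment gap, and the 16-bit displacement test narrowed by
that slack.

/* Standalone illustration only.  The real code derives these values from
   the relocations of the section being relaxed.  */

#include <stdio.h>

typedef unsigned long long vma_t;     /* stand-in for bfd_vma */
typedef long long signed_vma_t;       /* stand-in for bfd_signed_vma */

/* Worst-case growth of one alignment gap: aligning to (1 << addend) bytes
   can insert at most (1 << addend) - 1 extra bytes of padding when the
   code before the align shrinks.  The patch keeps the maximum of this
   value over all R_MN10300_ALIGN relocs in the section.  */
static vma_t
worst_case_align_slack (const unsigned int *addends, size_t count)
{
  vma_t slack = 0;

  for (size_t i = 0; i < count; i++)
    {
      vma_t adj = ((vma_t) 1 << addends[i]) - 1;  /* == 2*A - A - 1 */

      if (adj > slack)
	slack = adj;
    }

  return slack;
}

/* The tightened range test: the upper bound is 0x7fff + 2 because the
   target moves two bytes closer once the branch itself is shortened,
   and both ends of the window are pulled in by the worst-case slack.  */
static int
fits_in_16_bits (vma_t value, vma_t align_gap_adjustment)
{
  return value < 0x8001 - align_gap_adjustment
	 && ((signed_vma_t) value
	     > -0x8000 + (signed_vma_t) align_gap_adjustment);
}

int
main (void)
{
  unsigned int addends[] = { 1, 2, 3 };  /* aligns to 2, 4 and 8 bytes */
  vma_t slack = worst_case_align_slack (addends, 3);

  printf ("slack = %llu\n", slack);                  /* 7 */
  printf ("%d\n", fits_in_16_bits (0x7ffe, 0));      /* 1: reachable */
  printf ("%d\n", fits_in_16_bits (0x7ffe, slack));  /* 0: slack pushes it out of range */
  return 0;
}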
Diffstat (limited to 'bfd/elf-m10300.c')
-rw-r--r--  bfd/elf-m10300.c | 33
1 file changed, 32 insertions(+), 1 deletion(-)
diff --git a/bfd/elf-m10300.c b/bfd/elf-m10300.c
index 77c8caee34..2695217f4e 100644
--- a/bfd/elf-m10300.c
+++ b/bfd/elf-m10300.c
@@ -2105,6 +2105,7 @@ mn10300_elf_relax_section (bfd *abfd,
Elf_Internal_Sym *isymbuf = NULL;
struct elf32_mn10300_link_hash_table *hash_table;
asection *section = sec;
+ bfd_vma align_gap_adjustment;
/* Assume nothing changes. */
*again = FALSE;
@@ -2718,6 +2719,33 @@ mn10300_elf_relax_section (bfd *abfd,
if (internal_relocs == NULL)
goto error_return;
+ /* Scan for worst case alignment gap changes. Note that this logic
+ is not ideal; what we should do is run this scan for every
+ opcode/address range and adjust accordingly, but that's
+ expensive. Worst case is that for an alignment of N bytes, we
+ move by 2*N-N-1 bytes, assuming we have aligns of 1, 2, 4, 8, etc
+ all before it. Plus, this still doesn't cover cross-section
+ jumps with section alignment. */
+ irelend = internal_relocs + sec->reloc_count;
+ align_gap_adjustment = 0;
+ for (irel = internal_relocs; irel < irelend; irel++)
+ {
+ if (ELF32_R_TYPE (irel->r_info) == (int) R_MN10300_ALIGN)
+ {
+ bfd_vma adj = 1 << irel->r_addend;
+ bfd_vma aend = irel->r_offset;
+
+ aend = BFD_ALIGN (aend, 1 << irel->r_addend);
+ adj = 2*adj - adj - 1;
+
+ /* Record the biggest adjustment. Skip any alignment at the
+ end of our section. */
+ if (align_gap_adjustment < adj
+ && aend < sec->output_section->vma + sec->output_offset + sec->size)
+ align_gap_adjustment = adj;
+ }
+ }
+
/* Walk through them looking for relaxing opportunities. */
irelend = internal_relocs + sec->reloc_count;
for (irel = internal_relocs; irel < irelend; irel++)
@@ -2933,7 +2961,10 @@ mn10300_elf_relax_section (bfd *abfd,
/* See if the value will fit in 16 bits, note the high value is
0x7fff + 2 as the target will be two bytes closer if we are
able to relax. */
- if ((long) value < 0x8001 && (long) value > -0x8000)
+ /* Account for jumps across alignment boundaries using
+ align_gap_adjustment. */
+ if (value < 0x8001 - align_gap_adjustment
+ && ((bfd_signed_vma) value > -0x8000 + (bfd_signed_vma) align_gap_adjustment))
{
unsigned char code;