+
+ /* Stable sort of the relocs in reldata->hdr->contents by r_offset.
+    Stability matters: relocs sharing an r_offset must keep their
+    original relative order.  NOTE(review): this hunk is the interior
+    of a larger function (it ends with "return TRUE;"); `sort',
+    `count', `bed', `abfd' and `reldata' come from the enclosing,
+    unseen context.  */
+ if (sort && count != 0)
+ {
+ bfd_vma (*ext_r_off) (const void *);
+ bfd_vma r_off;
+ size_t elt_size;
+ bfd_byte *base, *end, *p, *loc;
+ bfd_byte *buf = NULL;
+
+ /* Select the r_offset extraction helper matching the target's
+    word size and byte order.  */
+ if (bed->s->arch_size == 32)
+ {
+ if (abfd->xvec->header_byteorder == BFD_ENDIAN_LITTLE)
+ ext_r_off = ext32l_r_offset;
+ else if (abfd->xvec->header_byteorder == BFD_ENDIAN_BIG)
+ ext_r_off = ext32b_r_offset;
+ else
+ abort ();
+ }
+ else
+ {
+ /* 64-bit targets are only handled when the host has a 64-bit
+    integer type; otherwise fall through to abort.  */
+#ifdef BFD_HOST_64_BIT
+ if (abfd->xvec->header_byteorder == BFD_ENDIAN_LITTLE)
+ ext_r_off = ext64l_r_offset;
+ else if (abfd->xvec->header_byteorder == BFD_ENDIAN_BIG)
+ ext_r_off = ext64b_r_offset;
+ else
+#endif
+ abort ();
+ }
+
+ /* Must use a stable sort here. A modified insertion sort,
+ since the relocs are mostly sorted already. */
+ elt_size = reldata->hdr->sh_entsize;
+ base = reldata->hdr->contents;
+ end = base + count * elt_size;
+ /* onebuf below (and the element copies through buf) assume an
+    element is no larger than an Elf64_External_Rela.  */
+ if (elt_size > sizeof (Elf64_External_Rela))
+ abort ();
+
+ /* Ensure the first element is lowest. This acts as a sentinel,
+ speeding the main loop below. */
+ r_off = (*ext_r_off) (base);
+ for (p = loc = base; (p += elt_size) < end; )
+ {
+ bfd_vma r_off2 = (*ext_r_off) (p);
+ if (r_off > r_off2)
+ {
+ r_off = r_off2;
+ loc = p;
+ }
+ }
+ if (loc != base)
+ {
+ /* Don't just swap *base and *loc as that changes the order
+ of the original base[0] and base[1] if they happen to
+ have the same r_offset. */
+ bfd_byte onebuf[sizeof (Elf64_External_Rela)];
+ memcpy (onebuf, loc, elt_size);
+ memmove (base + elt_size, base, loc - base);
+ memcpy (base, onebuf, elt_size);
+ }
+
+ /* Insertion sort proper.  The first two elements are already in
+    order (base[0] is the global minimum), so start at the third.
+    The sentinel guarantees the inner scan below cannot run off
+    the front of the array.  */
+ for (p = base + elt_size; (p += elt_size) < end; )
+ {
+ /* base to p is sorted, *p is next to insert. */
+ r_off = (*ext_r_off) (p);
+ /* Search the sorted region for location to insert. */
+ loc = p - elt_size;
+ while (r_off < (*ext_r_off) (loc))
+ loc -= elt_size;
+ loc += elt_size;
+ if (loc != p)
+ {
+ /* Chances are there is a run of relocs to insert here,
+ from one or more input files. Files are not always
+ linked in order due to the way elf_link_input_bfd is
+ called. See pr17666. */
+ size_t sortlen = p - loc;
+ bfd_vma r_off2 = (*ext_r_off) (loc);
+ size_t runlen = elt_size;
+ size_t buf_size = 96 * 1024;
+ /* Extend the run while the following elements also sort
+    before *loc, but only while at least one of the two
+    regions to be moved (the sorted tail or the run) still
+    fits in the scratch buffer.  */
+ while (p + runlen < end
+ && (sortlen <= buf_size
+ || runlen + elt_size <= buf_size)
+ && r_off2 > (*ext_r_off) (p + runlen))
+ runlen += elt_size;
+ /* Allocate the scratch buffer lazily, only when a move is
+    actually needed.  */
+ if (buf == NULL)
+ {
+ buf = bfd_malloc (buf_size);
+ if (buf == NULL)
+ return FALSE;
+ }
+ /* Move whichever region is smaller through the buffer;
+    the larger region is shifted in place with memmove.  */
+ if (runlen < sortlen)
+ {
+ memcpy (buf, p, runlen);
+ memmove (loc + runlen, loc, sortlen);
+ memcpy (loc, buf, runlen);
+ }
+ else
+ {
+ memcpy (buf, loc, sortlen);
+ memmove (loc, p, runlen);
+ memcpy (loc + runlen, buf, sortlen);
+ }
+ /* Skip over the inserted run; the loop increment adds the
+    final elt_size back.  */
+ p += runlen - elt_size;
+ }
+ }
+ /* Hashes are no longer valid. */
+ free (reldata->hashes);
+ reldata->hashes = NULL;
+ free (buf);
+ }
+ return TRUE;