1/* SPDX-License-Identifier: BSD-2-Clause */
2/*
3 * Copyright (c) 2019, Linaro Limited
4 * Copyright (c) 2020, Arm Limited
5 */
6
7#include <asm.S>
8#include <elf_common.h>
9
/*
 * _ldelf_start() - Entry of ldelf
 *
 * See include/ldelf.h for details on TEE Core interaction.
 *
 * void _ldelf_start(struct ldelf_arg *arg);
 */
FUNC _ldelf_start , :
	/*
	 * First ldelf needs to be relocated. The binary is compiled to
	 * contain only a minimal number of R_AARCH64_RELATIVE relocations
	 * in read/write memory, leaving read-only and executable memory
	 * untouched.
	 */
	adr	x4, reloc_begin_rel	/* x4 = runtime address of reloc_begin_rel */
	ldr	w5, reloc_begin_rel	/* w5 = __reloc_begin - reloc_begin_rel */
	ldr	w6, reloc_end_rel	/* w6 = __reloc_end - reloc_begin_rel */
	add	x5, x5, x4		/* x5 = runtime __reloc_begin */
	add	x6, x6, x4		/* x6 = runtime __reloc_end */
	cmp	x5, x6
	beq	2f			/* Empty table, nothing to relocate */

	adr	x4, _ldelf_start	/* Get the load offset */

	/* Loop over the relocations (Elf64_Rela) and process all entries */
1:	ldp	x7, x8, [x5], #16	/* x7 = r_offset, x8 = r_info */
	ldr	x9, [x5], #8		/* x9 = r_addend */
	and	x8, x8, #0xffffffff	/* x8 = ELF64_R_TYPE(r_info) */
	cmp	x8, #R_AARCH64_RELATIVE
	/* We're currently only supporting R_AARCH64_RELATIVE relocations */
	bne	3f

	/*
	 * Update the pointer at r_offset + load_offset with r_addend +
	 * load_offset.
	 */
	add	x7, x7, x4
	add	x9, x9, x4
	str	x9, [x7]

	cmp	x5, x6
	blo	1b

2:	bl	ldelf
	mov	x0, #0
	bl	_ldelf_return
3:	mov	x0, #0
	bl	_ldelf_panic
	/*
	 * Both words must be offsets relative to reloc_begin_rel: they are
	 * added to x4 (= address of reloc_begin_rel) above to form the
	 * absolute begin/end addresses of the relocation table. Using
	 * reloc_end_rel as the base for the second word would leave x6
	 * short of __reloc_end and break the empty-table check.
	 */
reloc_begin_rel:
	.word	__reloc_begin - reloc_begin_rel
reloc_end_rel:
	.word	__reloc_end - reloc_begin_rel
END_FUNC _ldelf_start
63
/*
 * NOTE(review): BTI() is a project macro (presumably from asm.S) that,
 * when branch protection is enabled, emits the GNU property note
 * advertising AArch64 Branch Target Identification support for this
 * object file — confirm against the macro definition.
 */
BTI(emit_aarch64_feature_1_and     GNU_PROPERTY_AARCH64_FEATURE_1_BTI)
65