/*
 * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <common/bl_common.ld.h>
#include <lib/xlat_tables/xlat_tables_defs.h>

OUTPUT_FORMAT(PLATFORM_LINKER_FORMAT)
OUTPUT_ARCH(PLATFORM_LINKER_ARCH)
ENTRY(tsp_entrypoint)

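/*
 * Single region of secure RAM reserved for the TSP image. Its base and size
 * are expected to come from the platform via TSP_SEC_MEM_BASE and
 * TSP_SEC_MEM_SIZE.
 */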
MEMORY {
    RAM (rwx): ORIGIN = TSP_SEC_MEM_BASE, LENGTH = TSP_SEC_MEM_SIZE
}

SECTIONS {
    RAM_REGION_START = ORIGIN(RAM);
    RAM_REGION_LENGTH = LENGTH(RAM);
    . = BL32_BASE;

    ASSERT(. == ALIGN(PAGE_SIZE),
        "BL32_BASE address is not aligned on a page boundary.")

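    /*
     * With SEPARATE_CODE_AND_RODATA enabled, code and read-only data are
     * emitted as separate, page-aligned output sections so that they can be
     * mapped with different access permissions; otherwise both are grouped
     * into a single read-only section.
     */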
#if SEPARATE_CODE_AND_RODATA
    .text . : {
        __TEXT_START__ = .;

        *tsp_entrypoint.o(.text*)
        *(.text*)
        *(.vectors)

        . = ALIGN(PAGE_SIZE);

        __TEXT_END__ = .;
    } >RAM

    .rodata . : {
        __RODATA_START__ = .;

        *(.rodata*)

        RODATA_COMMON

        . = ALIGN(PAGE_SIZE);

        __RODATA_END__ = .;
    } >RAM
#else /* SEPARATE_CODE_AND_RODATA */
    .ro . : {
        __RO_START__ = .;

        *tsp_entrypoint.o(.text*)
        *(.text*)
        *(.rodata*)

        RODATA_COMMON

        *(.vectors)

        __RO_END_UNALIGNED__ = .;

        /*
         * Memory page(s) mapped to this section will be marked as read-only,
         * executable. No RW data from the next section must creep in. Ensure
         * that the rest of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);

        __RO_END__ = .;
    } >RAM
#endif /* SEPARATE_CODE_AND_RODATA */

    __RW_START__ = .;

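    /*
     * DATA_SECTION and RELA_SECTION are common output section definitions
     * provided by common/bl_common.ld.h, included at the top of this script.
     */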
    DATA_SECTION >RAM
    RELA_SECTION >RAM

#ifdef TSP_PROGBITS_LIMIT
    ASSERT(. <= TSP_PROGBITS_LIMIT, "TSP progbits has exceeded its limit.")
#endif /* TSP_PROGBITS_LIMIT */

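    /*
     * Stacks, BSS and translation tables are NOLOAD sections: they occupy
     * RAM at run time but add no content to the loaded image.
     */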
    STACK_SECTION >RAM
    BSS_SECTION >RAM
    XLAT_TABLE_SECTION >RAM

#if USE_COHERENT_MEM
    /*
     * The base address of the coherent memory section must be page-aligned to
     * guarantee that the coherent data are stored on their own pages and are
     * not mixed with normal data. This is required to set up the correct memory
     * attributes for the coherent data page tables.
     */
    .coherent_ram (NOLOAD) : ALIGN(PAGE_SIZE) {
        __COHERENT_RAM_START__ = .;
        *(.tzfw_coherent_mem)
        __COHERENT_RAM_END_UNALIGNED__ = .;

        /*
         * Memory page(s) mapped to this section will be marked as device
         * memory. No other unexpected data must creep in. Ensure that the rest
         * of the current memory page is unused.
         */
        . = ALIGN(PAGE_SIZE);

        __COHERENT_RAM_END__ = .;
    } >RAM
#endif /* USE_COHERENT_MEM */

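    /*
     * __RW_END__ marks the end of the read-write data; __BL32_END__ marks
     * the end of the whole BL32 image in memory.
     */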
    __RW_END__ = .;
    __BL32_END__ = .;

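    /*
     * Dynamic linking metadata is not needed in the final image; discard it
     * rather than letting it take up space.
     */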
    /DISCARD/ : {
        *(.dynsym .dynstr .hash .gnu.hash)
    }

    __BSS_SIZE__ = SIZEOF(.bss);

#if USE_COHERENT_MEM
    __COHERENT_RAM_UNALIGNED_SIZE__ =
        __COHERENT_RAM_END_UNALIGNED__ - __COHERENT_RAM_START__;
#endif /* USE_COHERENT_MEM */

    ASSERT(. <= BL32_LIMIT, "BL32 image has exceeded its limit.")
    RAM_REGION_END = .;
}