/*
 * Copyright (c) 2018-2020, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>

	.globl	zeromem
	.globl	memcpy4
	.globl	disable_mmu_icache

/* -----------------------------------------------------------------------
 * void zeromem(void *mem, unsigned int length)
 *
 * Initialise a memory region to 0.
 * The memory address must be 4-byte aligned.
 * -----------------------------------------------------------------------
 */
func zeromem
#if ENABLE_ASSERTIONS
	tst	r0, #0x3
	ASM_ASSERT(eq)
#endif
	mov	r2, #0
/* zero 4 bytes at a time */
z_loop4:
	cmp	r1, #4
	blo	z_loop1
	str	r2, [r0], #4
	subs	r1, r1, #4
	bne	z_loop4
	bx	lr

/* zero byte per byte */
z_loop1:
	cmp	r1, #0		/* guard: a zero length must not underflow */
	beq	z_end
	strb	r2, [r0], #1
	subs	r1, r1, #1
	bne	z_loop1
z_end:
	bx	lr
endfunc zeromem
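
/*
 * Illustrative call sequence (not part of the original file): zeroing a
 * 64-byte, 4-byte-aligned buffer from assembly. The symbol `scratch_buf`
 * is hypothetical and assumed to be defined elsewhere.
 *
 *	ldr	r0, =scratch_buf	// r0 = base address (4-byte aligned)
 *	mov	r1, #64			// r1 = length in bytes
 *	bl	zeromem
 */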

/* --------------------------------------------------------------------------
 * void memcpy4(void *dest, const void *src, unsigned int length)
 *
 * Copy length bytes from memory area src to memory area dest.
 * The memory areas should not overlap.
 * Destination and source addresses must be 4-byte aligned.
 * --------------------------------------------------------------------------
 */
func memcpy4
#if ENABLE_ASSERTIONS
	orr	r3, r0, r1
	tst	r3, #0x3
	ASM_ASSERT(eq)
#endif
/* copy 4 bytes at a time */
m_loop4:
	cmp	r2, #4
	blo	m_loop1
	ldr	r3, [r1], #4
	str	r3, [r0], #4
	subs	r2, r2, #4
	bne	m_loop4
	bx	lr

/* copy byte per byte */
m_loop1:
	cmp	r2, #0		/* guard: a zero length must not underflow */
	beq	m_end
	ldrb	r3, [r1], #1
	strb	r3, [r0], #1
	subs	r2, r2, #1
	bne	m_loop1
m_end:
	bx	lr
endfunc memcpy4
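
/*
 * Illustrative call sequence (not part of the original file): copying a
 * 16-byte, 4-byte-aligned block. The symbols `src_buf` and `dst_buf` are
 * hypothetical and assumed to be defined elsewhere.
 *
 *	ldr	r0, =dst_buf		// r0 = destination (4-byte aligned)
 *	ldr	r1, =src_buf		// r1 = source (4-byte aligned)
 *	mov	r2, #16			// r2 = length in bytes
 *	bl	memcpy4
 */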

/* ---------------------------------------------------------------------------
 * Disable the MMU in Hyp mode
 * (the code below operates on HSCTLR, the Hyp System Control Register)
 * ---------------------------------------------------------------------------
 */

func disable_mmu
	mov	r1, #(HSCTLR_M_BIT | HSCTLR_C_BIT)
do_disable_mmu:
	ldcopr	r0, HSCTLR
	bic	r0, r0, r1	/* clear the M (MMU) and C (D-cache) bits */
	stcopr	r0, HSCTLR
	isb			// ensure MMU is off
	dsb	sy
	bx	lr
endfunc disable_mmu

func disable_mmu_icache
	ldr	r1, =(HSCTLR_M_BIT | HSCTLR_C_BIT | HSCTLR_I_BIT)
	b	do_disable_mmu
endfunc disable_mmu_icache
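
/*
 * Illustrative call (not part of the original file): from Hyp mode, turn off
 * the MMU, D-cache and I-cache together, e.g. before handing control to an
 * image that expects translation to be disabled. Dirty cache lines should be
 * cleaned beforehand, since data accesses become non-cacheable once the C
 * bit is clear.
 *
 *	bl	disable_mmu_icache
 */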