ARMv8 Trusted Firmware release v0.2
diff --git a/arch/aarch64/cpu/cpu_helpers.S b/arch/aarch64/cpu/cpu_helpers.S
new file mode 100644
index 0000000..600b72f
--- /dev/null
+++ b/arch/aarch64/cpu/cpu_helpers.S
@@ -0,0 +1,71 @@
+/*
+ * Copyright (c) 2013, ARM Limited. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * Redistributions of source code must retain the above copyright notice, this
+ * list of conditions and the following disclaimer.
+ *
+ * Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ *
+ * Neither the name of ARM nor the names of its contributors may be used
+ * to endorse or promote products derived from this software without specific
+ * prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <arch.h>
+
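+	/* Declared weak so that a platform can override this default handler */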
+	.weak	cpu_reset_handler
+
+
+	.section	aarch64_code, "ax"; .align 3
+
+cpu_reset_handler:; .type cpu_reset_handler, %function
+	mov	x19, x30 // save lr; the bl calls below clobber x30
+
+	/* ---------------------------------------------
+	 * As a bare minimum, enable the I-cache on all
+	 * AArch64 cores and the SMP bit on Cortex-A57
+	 * and Cortex-A53. Also set the exception
+	 * vector to something sane.
+	 * ---------------------------------------------
+	 */
+	adr	x0, early_exceptions
+	bl	write_vbar
+
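+	/* Extract the primary part number from MIDR */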
+	bl	read_midr
+	lsr	x0, x0, #MIDR_PN_SHIFT
+	and	x0, x0, #MIDR_PN_MASK
+	cmp	x0, #MIDR_PN_A57
+	b.eq	smp_setup_begin
+	cmp	x0, #MIDR_PN_A53
+	b.ne	smp_setup_end
+smp_setup_begin:
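+	/* Set the SMP bit in CPUECTLR so this core joins the coherency domain */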
+	bl	read_cpuectlr
+	orr	x0, x0, #CPUECTLR_SMP_BIT
+	bl	write_cpuectlr
+smp_setup_end:
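+	/* Enable the instruction cache by setting the SCTLR.I bit */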
+	bl	read_sctlr
+	orr	x0, x0, #SCTLR_I_BIT
+	bl	write_sctlr
+
+	ret	x19
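
For reference, below is a minimal standalone sketch of the MIDR check and the
SMP/I-cache enable steps, with the read_*/write_* helper calls open-coded as
direct system register accesses. It is illustrative, not part of the release:
it assumes execution at EL3 (the in-tree helpers select the register for the
current exception level), the label reset_sketch is hypothetical, and the
literal constants stand in for the MIDR_PN_*, CPUECTLR_SMP_BIT and SCTLR_I_BIT
definitions provided by <arch.h>.

	.type	reset_sketch, %function
reset_sketch:
	mrs	x0, midr_el1
	ubfx	x0, x0, #4, #12		/* primary part number, MIDR[15:4] */
	cmp	x0, #0xD07		/* Cortex-A57 */
	b.eq	1f
	cmp	x0, #0xD03		/* Cortex-A53 */
	b.ne	2f
1:
	mrs	x1, s3_1_c15_c2_1	/* CPUECTLR_EL1 (IMPLEMENTATION DEFINED encoding on A53/A57) */
	orr	x1, x1, #0x40		/* SMPEN, bit 6: enable coherent requests */
	msr	s3_1_c15_c2_1, x1
2:
	mrs	x1, sctlr_el3
	orr	x1, x1, #0x1000		/* SCTLR.I, bit 12: enable the I-cache */
	msr	sctlr_el3, x1
	isb				/* make the register writes take effect */
	ret

Note that CPUECTLR is accessed through its generic s3_1_c15_c2_1 encoding here,
since it is an IMPLEMENTATION DEFINED register; the vector setup via
early_exceptions is omitted because that symbol is defined elsewhere in the tree.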