Merge "fix(pie): invalidate data cache in the entire image range if PIE is enabled" into integration
diff --git a/bl32/tsp/aarch64/tsp_entrypoint.S b/bl32/tsp/aarch64/tsp_entrypoint.S
index 795c586..7d77f47 100644
--- a/bl32/tsp/aarch64/tsp_entrypoint.S
+++ b/bl32/tsp/aarch64/tsp_entrypoint.S
@@ -100,11 +100,27 @@
 	 * sections. This is done to safeguard against
 	 * possible corruption of this memory by dirty
 	 * cache lines in a system cache as a result of
-	 * use by an earlier boot loader stage.
+	 * use by an earlier boot loader stage. If PIE
+	 * is enabled, however, RO sections including the
+	 * GOT may be modified during PIE fixup.
+	 * Therefore, to be on the safe side, invalidate
+	 * the entire image region if PIE is enabled.
 	 * ---------------------------------------------
 	 */
-	adr	x0, __RW_START__
-	adr	x1, __RW_END__
+#if ENABLE_PIE
+#if SEPARATE_CODE_AND_RODATA
+	adrp	x0, __TEXT_START__
+	add	x0, x0, :lo12:__TEXT_START__
+#else
+	adrp	x0, __RO_START__
+	add	x0, x0, :lo12:__RO_START__
+#endif /* SEPARATE_CODE_AND_RODATA */
+#else
+	adrp	x0, __RW_START__
+	add	x0, x0, :lo12:__RW_START__
+#endif /* ENABLE_PIE */
+	adrp	x1, __RW_END__
+	add	x1, x1, :lo12:__RW_END__
 	sub	x1, x1, x0
 	bl	inv_dcache_range
 
diff --git a/include/arch/aarch32/el3_common_macros.S b/include/arch/aarch32/el3_common_macros.S
index 65f9a8e..ad2a039 100644
--- a/include/arch/aarch32/el3_common_macros.S
+++ b/include/arch/aarch32/el3_common_macros.S
@@ -380,10 +380,21 @@
 		 * includes the data and NOBITS sections. This is done to
 		 * safeguard against possible corruption of this memory by
 		 * dirty cache lines in a system cache as a result of use by
-		 * an earlier boot loader stage.
+		 * an earlier boot loader stage. If PIE is enabled, however,
+		 * RO sections including the GOT may be modified during
+		 * PIE fixup. Therefore, to be on the safe side, invalidate
+		 * the entire image region if PIE is enabled.
 		 * -----------------------------------------------------------------
 		 */
+#if ENABLE_PIE
+#if SEPARATE_CODE_AND_RODATA
+		ldr	r0, =__TEXT_START__
+#else
+		ldr	r0, =__RO_START__
+#endif /* SEPARATE_CODE_AND_RODATA */
+#else
 		ldr	r0, =__RW_START__
+#endif /* ENABLE_PIE */
 		ldr	r1, =__RW_END__
 		sub	r1, r1, r0
 		bl	inv_dcache_range
diff --git a/include/arch/aarch64/el3_common_macros.S b/include/arch/aarch64/el3_common_macros.S
index 7d6a963..8e8d334 100644
--- a/include/arch/aarch64/el3_common_macros.S
+++ b/include/arch/aarch64/el3_common_macros.S
@@ -430,11 +430,24 @@
 		 * includes the data and NOBITS sections. This is done to
 		 * safeguard against possible corruption of this memory by
 		 * dirty cache lines in a system cache as a result of use by
-		 * an earlier boot loader stage.
+		 * an earlier boot loader stage. If PIE is enabled, however,
+		 * RO sections including the GOT may be modified during
+		 * PIE fixup. Therefore, to be on the safe side, invalidate
+		 * the entire image region if PIE is enabled.
 		 * -------------------------------------------------------------
 		 */
+#if ENABLE_PIE
+#if SEPARATE_CODE_AND_RODATA
+		adrp	x0, __TEXT_START__
+		add	x0, x0, :lo12:__TEXT_START__
+#else
+		adrp	x0, __RO_START__
+		add	x0, x0, :lo12:__RO_START__
+#endif /* SEPARATE_CODE_AND_RODATA */
+#else
 		adrp	x0, __RW_START__
 		add	x0, x0, :lo12:__RW_START__
+#endif /* ENABLE_PIE */
 		adrp	x1, __RW_END__
 		add	x1, x1, :lo12:__RW_END__
 		sub	x1, x1, x0
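
Note: all three hunks apply the same range selection, so a single C sketch of the
equivalent logic is given here for reference. The helper name
invalidate_loaded_image() is illustrative only; the linker-defined section symbols
and inv_dcache_range() are the ones already referenced in the assembly above, with
the prototype assumed to be the usual (addr, size) form from TF-A's arch helpers.

#include <stddef.h>
#include <stdint.h>

/* Linker-defined section boundaries; declared as arrays so that only
 * their addresses are taken. */
extern char __TEXT_START__[], __RO_START__[], __RW_START__[], __RW_END__[];

/* Cache maintenance helper (assumed prototype: invalidate 'size'
 * bytes of data cache starting at 'addr'). */
void inv_dcache_range(uintptr_t addr, size_t size);

/* Illustrative helper: pick the invalidation base depending on the
 * build options, then invalidate up to the end of the RW region. */
void invalidate_loaded_image(void)
{
#if ENABLE_PIE
#if SEPARATE_CODE_AND_RODATA
	/* PIE fixup may patch the GOT in the RO region, so start from the
	 * beginning of the image. */
	uintptr_t start = (uintptr_t)__TEXT_START__;
#else
	uintptr_t start = (uintptr_t)__RO_START__;
#endif /* SEPARATE_CODE_AND_RODATA */
#else
	/* Without PIE, invalidating only the RW region is sufficient,
	 * matching the original behaviour. */
	uintptr_t start = (uintptr_t)__RW_START__;
#endif /* ENABLE_PIE */
	uintptr_t end = (uintptr_t)__RW_END__;

	inv_dcache_range(start, end - start);
}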