
[v2,2/7] cortina: common: armv8: add custom init for CA ARMv8 based SoCs

Message ID 1579601912-27737-3-git-send-email-alex.nemirovsky@cortina-access.com
State: New
Series: Add Cortina Access basic DM drivers

Commit Message

Alex Nemirovsky Jan. 21, 2020, 10:19 a.m. UTC
Cortina Access ARMv8 boards share common custom ARMv8 init routines.

Add common board init code for Cortina Access ARMv8-based SoCs.

Signed-off-by: Alex Nemirovsky <alex.nemirovsky@cortina-access.com>
---

Changes in v2: None

 board/cortina/common/armv8/lowlevel_init.S | 87 ++++++++++++++++++++++++++++++
 1 file changed, 87 insertions(+)
 create mode 100644 board/cortina/common/armv8/lowlevel_init.S
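
For context, the assembly below expects the board configuration to provide the GIC and timer base addresses it references (GICD_BASE, GICR_BASE, GICC_BASE and CONFIG_SYS_TIMER_BASE). A minimal sketch of such a config header is given here for orientation only; the addresses are placeholders, not the actual Cortina Access memory map.

	/*
	 * Illustrative config header sketch -- not part of this patch.
	 * All addresses below are placeholders; the real values come
	 * from the Cortina Access SoC memory map.
	 */
	#define GICD_BASE		0xf7011000	/* GIC distributor (placeholder) */
	#define GICR_BASE		0xf7012000	/* GICv3 redistributor (placeholder) */
	#define GICC_BASE		0xf7013000	/* GICv2 CPU interface (placeholder) */
	#define CONFIG_SYS_TIMER_BASE	0xf7014000	/* CPU timer control (placeholder) */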

Patch

diff --git a/board/cortina/common/armv8/lowlevel_init.S b/board/cortina/common/armv8/lowlevel_init.S
new file mode 100644
index 0000000..702611b
--- /dev/null
+++ b/board/cortina/common/armv8/lowlevel_init.S
@@ -0,0 +1,87 @@ 
+/* SPDX-License-Identifier: GPL-2.0+ */
+/*
+ * Copyright (C) 2020 Cortina-Access
+ *
+ */
+
+
+#include <asm-offsets.h>
+#include <config.h>
+#include <linux/linkage.h>
+#include <asm/macro.h>
+#include <asm/armv8/mmu.h>
+
+	.globl lowlevel_init
+lowlevel_init:
+	mov	x29, lr			/* Save LR */
+
+#if defined(CONFIG_CA77XX)
+	/* Enable SMPEN in CPUECTLR */
+	mrs	x0, s3_1_c15_c2_1
+	tst	x0, #0x40
+	b.ne	skip_smp_setup
+	orr	x0, x0, #0x40
+	msr	s3_1_c15_c2_1, x0
+skip_smp_setup:
+#endif
+
+#if defined(CONFIG_CA8277B)
+	/* Enable CPU Timer */
+	ldr	x0, =CONFIG_SYS_TIMER_BASE
+	mov	x1, #1
+	str	w1, [x0]
+#endif
+
+#if defined(CONFIG_GICV2) || defined(CONFIG_GICV3)
+	branch_if_slave x0, 1f
+#ifndef CONFIG_TARGET_VENUS
+	ldr	x0, =GICD_BASE
+	bl	gic_init_secure
+#endif
+1:
+#if defined(CONFIG_GICV3)
+	ldr	x0, =GICR_BASE
+	bl	gic_init_secure_percpu
+#elif defined(CONFIG_GICV2)
+	ldr	x0, =GICD_BASE
+	ldr	x1, =GICC_BASE
+	bl	gic_init_secure_percpu
+#endif
+#endif
+
+#ifdef CONFIG_ARMV8_MULTIENTRY
+	branch_if_master x0, x1, 2f
+
+	/*
+	 * Slaves should wait for the master to clear the spin table.
+	 * This sync prevents slaves from observing a stale spin
+	 * table value and jumping to the wrong place.
+	 */
+#if defined(CONFIG_GICV2) || defined(CONFIG_GICV3)
+#ifdef CONFIG_GICV2
+	ldr	x0, =GICC_BASE
+#endif
+	bl	gic_wait_for_interrupt
+#endif
+
+	/*
+	 * All slaves will enter EL2 and optionally EL1.
+	 */
+	adr	x4, lowlevel_in_el2
+	ldr	x5, =ES_TO_AARCH64
+	bl	armv8_switch_to_el2
+
+lowlevel_in_el2:
+#ifdef CONFIG_ARMV8_SWITCH_TO_EL1
+	adr	x4, lowlevel_in_el1
+	ldr	x5, =ES_TO_AARCH64
+	bl	armv8_switch_to_el1
+
+lowlevel_in_el1:
+#endif
+
+#endif /* CONFIG_ARMV8_MULTIENTRY */
+
+2:
+	mov	lr, x29			/* Restore LR */
+	ret
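
As a side note, the CONFIG_CA8277B hunk simply writes 1 to the timer control register to start the CPU timer. For readers more comfortable with C, the same operation would look roughly like the sketch below; ca_enable_cpu_timer() is a hypothetical helper used only for illustration and is not part of this patch.

	#include <asm/io.h>
	#include <config.h>

	/* Hypothetical helper: C equivalent of the CONFIG_CA8277B timer enable above. */
	static void ca_enable_cpu_timer(void)
	{
		/* Write 1 to the timer control register to start the CPU timer. */
		writel(1, (void *)CONFIG_SYS_TIMER_BASE);
	}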