
[2/2] MAINTAINERS, cortina: add custom init for CA ARMv8 based SoCs

Message ID 1579511193-8034-2-git-send-email-alex.nemirovsky@cortina-access.com
State Superseded
Series [1/2] MAINTAINERS, cortina: add common feature and platform enablement flag

Commit Message

Alex Nemirovsky Jan. 20, 2020, 9:06 a.m. UTC
Cortina Access ARMv8 boards share common custom ARMv8 init routines.

Add common board init code for Cortina Access ARMv8-based SoCs.

Signed-off-by: Alex Nemirovsky <alex.nemirovsky@cortina-access.com>
---

 MAINTAINERS                                |  2 +-
 board/cortina/common/armv8/lowlevel_init.S | 87 ++++++++++++++++++++++++++++++
 2 files changed, 88 insertions(+), 1 deletion(-)
 create mode 100644 board/cortina/common/armv8/lowlevel_init.S
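
For context, U-Boot's generic ARMv8 start code provides a weak lowlevel_init and branches to it early in the reset path, so a board hooks in custom init simply by defining a strong lowlevel_init symbol, as the file added here does. The fragment below is a simplified, hypothetical sketch of that call flow, not part of this patch; the reset_sketch/master_cpu labels are illustrative and the ordering is abridged:

	/* Sketch only: abridged ARMv8 reset path around lowlevel_init */
reset_sketch:
	/* ... errata workarounds, exception level and cache setup ... */
	bl	lowlevel_init		/* weak default unless a board overrides it */
	branch_if_master x0, x1, master_cpu
	/* secondary CPUs: wait here until released (e.g. via a spin table) */
master_cpu:
	bl	_main			/* continue with generic board init */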

Patch

diff --git a/MAINTAINERS b/MAINTAINERS
index 394a089..02b2e11 100644
--- a/MAINTAINERS
+++ b/MAINTAINERS
@@ -178,7 +178,7 @@  M:	Alex Nemirovsky <alex.nemirovsky@cortina-access.com>
 S:	Supported
 F:	board/cortina/common/*
 F:	board/cortina/common/Kconfig
-F:	board/cortina/common/armv8/*
+F:	board/cortina/common/armv8/lowlevel_init.S
 
 ARM/CZ.NIC TURRIS MOX SUPPORT
 M:	Marek Behun <marek.behun@nic.cz>
diff --git a/board/cortina/common/armv8/lowlevel_init.S b/board/cortina/common/armv8/lowlevel_init.S
new file mode 100644
index 0000000..702611b
--- /dev/null
+++ b/board/cortina/common/armv8/lowlevel_init.S
@@ -0,0 +1,87 @@ 
+/* SPDX-License-Identifier: GPL-2.0+ */
+/*
+ * Copyright (C) 2020 Cortina-Access
+ *
+ */
+
+#include <asm-offsets.h>
+#include <config.h>
+#include <linux/linkage.h>
+#include <asm/macro.h>
+#include <asm/armv8/mmu.h>
+
+	.globl lowlevel_init
+lowlevel_init:
+	mov	x29, lr			/* Save LR */
+
+#if defined(CONFIG_CA77XX)
+	/* Enable SMPEN in CPUECTLR_EL1 (hardware coherency) unless it is already set */
+	mrs	x0, s3_1_c15_c2_1
+	tst	x0, #0x40
+	b.ne	skip_smp_setup
+	orr	x0, x0, #0x40
+	msr	s3_1_c15_c2_1, x0
+skip_smp_setup:
+#endif
+
+#if defined(CONFIG_CA8277B)
+	/* Enable CPU Timer */
+	ldr	x0, =CONFIG_SYS_TIMER_BASE
+	mov	x1, #1
+	str	w1, [x0]
+#endif
+
+#if defined(CONFIG_GICV2) || defined(CONFIG_GICV3)
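+	/* Only the master CPU runs the GIC distributor init below; slaves skip to 1: */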
+	branch_if_slave x0, 1f
+#ifndef CONFIG_TARGET_VENUS
+	ldr	x0, =GICD_BASE
+	bl	gic_init_secure
+#endif
+1:
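+	/* Per-CPU GIC setup: GICv3 redistributor or GICv2 CPU interface */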
+#if defined(CONFIG_GICV3)
+	ldr	x0, =GICR_BASE
+	bl	gic_init_secure_percpu
+#elif defined(CONFIG_GICV2)
+	ldr	x0, =GICD_BASE
+	ldr	x1, =GICC_BASE
+	bl	gic_init_secure_percpu
+#endif
+#endif
+
+#ifdef CONFIG_ARMV8_MULTIENTRY
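+	/* The master CPU skips straight to 2: below; slaves wait and then drop to EL2/EL1 */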
+	branch_if_master x0, x1, 2f
+
+	/*
+	 * Slaves should wait for the master to clear the spin table.
+	 * This sync prevents slaves from observing a stale spin-table
+	 * value and jumping to the wrong place.
+	 */
+#if defined(CONFIG_GICV2) || defined(CONFIG_GICV3)
+#ifdef CONFIG_GICV2
+	ldr	x0, =GICC_BASE
+#endif
+	bl	gic_wait_for_interrupt
+#endif
+
+	/*
+	 * All slaves will enter EL2 and optionally EL1.
+	 */
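+	/* armv8_switch_to_el2: x4 = entry point after the switch, x5 = execution state */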
+	adr	x4, lowlevel_in_el2
+	ldr	x5, =ES_TO_AARCH64
+	bl	armv8_switch_to_el2
+
+lowlevel_in_el2:
+#ifdef CONFIG_ARMV8_SWITCH_TO_EL1
+	adr	x4, lowlevel_in_el1
+	ldr	x5, =ES_TO_AARCH64
+	bl	armv8_switch_to_el1
+
+lowlevel_in_el1:
+#endif
+
+#endif /* CONFIG_ARMV8_MULTIENTRY */
+
+2:
+	mov	lr, x29			/* Restore LR */
+	ret