1/*
2 * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
3 *
4 * SPDX-License-Identifier: BSD-3-Clause
5 */
6
7#include <arch.h>
8#include <asm_macros.S>
9#include <platform_def.h>
10#include <pmu_regs.h>
11
	.globl	clst_warmboot_data

	/*
	 * sram_func NAME
	 * Open a function that must execute from SRAM (placed in the
	 * "ax" .sram.text section, e.g. because it runs while DRAM is
	 * in self-refresh).  Emits the section switch, the %function
	 * type directive, the CFI prologue and the entry label.  The
	 * matching 'endfunc NAME' closes the CFI region.
	 */
	.macro sram_func _name
	.cfi_sections .debug_frame
	.section .sram.text, "ax"
	.type \_name, %function
	.cfi_startproc
	\_name:
	.endm
21
/* CRU clock-select register offset used to pick the clk_ddrc PLL source */
#define CRU_CLKSEL_CON6	0x118

/*
 * PMU_SFT_CON bit masks: software "controller system request" bits for
 * the two DDR controllers.  Setting them requests the controllers to
 * idle so the DDR can enter self-refresh; clearing them releases the
 * request.  (Bit positions per the SoC PMU register layout.)
 */
#define DDRCTL0_C_SYSREQ_CFG 0x0100
#define DDRCTL1_C_SYSREQ_CFG 0x1000

/* PMU_DDR_SREF_ST status bits, one per DDR channel */
#define DDRC0_SREF_DONE_EXT 0x01
#define DDRC1_SREF_DONE_EXT 0x04

/*
 * CRU clock-select registers use a write-mask layout: bits [31:16] are
 * write enables for bits [15:0].  PLL_NORMAL_MODE enables writing the
 * 2-bit mode field at PLL_MODE_SHIFT and sets it to 0x1 (normal/PLL
 * mode, i.e. leave the slow 24 MHz mode used across suspend).
 */
#define PLL_MODE_SHIFT	(0x8)
#define PLL_NORMAL_MODE	((0x3 << (PLL_MODE_SHIFT + 16)) | \
						 (0x1 << PLL_MODE_SHIFT))
#define MPIDR_CLST_L_BITS 0x0
	/*
	 * Per-cluster warm-boot fixup, invoked early on the warm-boot
	 * path.  If the cluster was suspended, its core clock was left
	 * in slow (24 MHz) mode; restore it to normal (PLL) mode before
	 * anything timing-sensitive runs.
	 *
	 * In: X0 = MPIDR_EL1 & MPIDR_CLUSTER_MASK, i.e. the cluster
	 *     affinity field in bits [15:8] (0x000 for the little
	 *     cluster, 0x100 for the big cluster).
	 * Clobbers: x2-x7, flags.
	 */
.macro	func_rockchip_clst_warmboot
	adr	x4, clst_warmboot_data
	/* x5 = cluster_id * 4 = word index into clst_warmboot_data */
	lsr	x5, x0, #6
	ldr	w3, [x4, x5]
	/* consume the flag: clear it so the next boot sees it reset */
	str	wzr, [x4, x5]
	/* only act if the cluster was put into retention */
	cmp	w3, #PMU_CLST_RET
	b.ne	clst_warmboot_end
	ldr	w6, =(PLL_NORMAL_MODE)
	/*
	 * core_l offset is CRU_BASE + 0xc,
	 * core_b offset is CRU_BASE + 0x2c
	 * x2 = cluster_id * 0x20 selects between the two (0x100 >> 3).
	 */
	ldr	x7, =(CRU_BASE + 0xc)
	lsr	x2, x0, #3
	/* switch this cluster's core clock back to normal (PLL) mode */
	str	w6, [x7, x2]
clst_warmboot_end:
.endm
58
	/*
	 * Reserve the warm-boot state array read/cleared by
	 * func_rockchip_clst_warmboot: one zero-initialised 32-bit word
	 * per cluster, indexed by cluster id.
	 */
.macro rockchip_clst_warmboot_data
clst_warmboot_data:
	.rept	PLATFORM_CLUSTER_COUNT
	.word	0
	.endr
.endm
65
	/* -----------------------------------------------
	 * void sram_func_set_ddrctl_pll(uint32_t pll_src)
	 * Function to switch the PLL source for ddrctrl.
	 * Runs from SRAM: both DDR channels are idled into
	 * self-refresh around the CRU write, so DRAM must
	 * not be touched while this executes.
	 * In: x0 - The PLL of the clk_ddrc clock source
	 *     (0 = ALPLL, 1 = ABPLL, 2 = DPLL, 3 = GPLL)
	 * out: None
	 * Clobber list : x0 - x3, x5, x8 - x10
	 * -----------------------------------------------
	 */

	.globl	sram_func_set_ddrctl_pll

sram_func sram_func_set_ddrctl_pll
	/* backup parameter: x0 is reused as an MMIO scratch below */
	mov	x8, x0

	/*
	 * Disable the MMU at EL3 so no translation-table walks (which
	 * live in DRAM) can occur while DRAM is in self-refresh.
	 * The original SCTLR_EL3 is kept in x9 for restore on exit.
	 */
	mrs 	x9, sctlr_el3
	bic	x10, x9, #(SCTLR_M_BIT)
	msr 	sctlr_el3, x10
	isb
	dsb 	sy

	/* enable ddrctl0_1 idle request: ask both DDR controllers to
	 * idle so their channels drop into self-refresh */
	mov	x5, PMU_BASE
	ldr	w0, [x5, #PMU_SFT_CON]
	orr	w0, w0, #DDRCTL0_C_SYSREQ_CFG
	orr	w0, w0, #DDRCTL1_C_SYSREQ_CFG
	str	w0, [x5, #PMU_SFT_CON]

	/*
	 * Poll PMU_DDR_SREF_ST until the channels have entered
	 * self-refresh.  NOTE(review): the loop repeats while BOTH
	 * DONE_EXT bits are set and exits once at least one clears,
	 * which implies the bits read as 1 while the channel is still
	 * out of self-refresh — confirm the bit polarity against the
	 * SoC TRM.
	 */
check_ddrc0_1_sref_enter:
	ldr	w1, [x5, #PMU_DDR_SREF_ST]
	and	w2, w1, #DDRC0_SREF_DONE_EXT
	and	w3, w1, #DDRC1_SREF_DONE_EXT
	orr	w2, w2, w3
	cmp	w2, #(DDRC0_SREF_DONE_EXT | DDRC1_SREF_DONE_EXT)
	b.eq	check_ddrc0_1_sref_enter

	/*
	 * select a PLL for ddrctrl:
	 * x0 = 0: ALPLL
	 * x0 = 1: ABPLL
	 * x0 = 2: DPLL
	 * x0 = 3: GPLLL
	 * CRU_CLKSEL_CON6 uses the write-mask layout: 0x00300000
	 * enables writing the 2-bit select field at bits [5:4],
	 * where the saved pll_src (x8) is shifted to.
	 */
	mov     x5, CRU_BASE
	lsl	w0, w8, #4
	orr	w0, w0, #0x00300000
	str 	w0, [x5, #CRU_CLKSEL_CON6]

	/* disable ddrctl0_1 idle request: release the controllers so
	 * the channels leave self-refresh */
	mov	x5, PMU_BASE
	ldr	w0, [x5, #PMU_SFT_CON]
	bic	w0, w0, #DDRCTL0_C_SYSREQ_CFG
	bic	w0, w0, #DDRCTL1_C_SYSREQ_CFG
	str	w0, [x5, #PMU_SFT_CON]

	/*
	 * Poll until the channels have exited self-refresh: the loop
	 * repeats while both status bits read 0 and exits once either
	 * bit sets (same polarity caveat as the enter loop above).
	 */
check_ddrc0_1_sref_exit:
	ldr	w1, [x5, #PMU_DDR_SREF_ST]
	and	w2, w1, #DDRC0_SREF_DONE_EXT
	and	w3, w1, #DDRC1_SREF_DONE_EXT
	orr	w2, w2, w3
	cmp	w2, #0x0
	b.eq	check_ddrc0_1_sref_exit

	/* reenable the MMU at EL3 by restoring the saved SCTLR_EL3 */
	msr 	sctlr_el3, x9
	isb
	dsb 	sy

	ret
endfunc sram_func_set_ddrctl_pll
137