AArch32: Add support for ARM Cortex-A53/57/72 MPCore Processor

This patch adds AArch32 state support for the ARM Cortex-A53,
Cortex-A57 and Cortex-A72 MPCore processors to the CPU-specific
operations framework.

NOTE: CPU errata handling code is not present in this patch.

Change-Id: I01eb3e028e40dde37565707ebc99e06e7a0c113d
Signed-off-by: Yatharth Kochar <yatharth.kochar@arm.com>
Signed-off-by: dp-arm <dimitris.papastamos@arm.com>
Author:    Yatharth Kochar <yatharth.kochar@arm.com>
Date:      2016-11-10 16:17:51 +00:00
Committer: dp-arm <dimitris.papastamos@arm.com>
Commit:    dc787588a5 (parent 0498343ac0)
8 files changed, 856 insertions(+), 0 deletions(-)


@@ -134,4 +134,37 @@
.space SPINLOCK_ASM_SIZE
.endm
/*
* Helper macro to OR the bottom 32 bits of `_val` into `_reg_l`
* and the top 32 bits of `_val` into `_reg_h`. If either the bottom
* or top word of `_val` is zero, the corresponding OR operation
* is skipped.
*/
.macro orr64_imm _reg_l, _reg_h, _val
.if (\_val >> 32)
orr \_reg_h, \_reg_h, #(\_val >> 32)
.endif
.if (\_val & 0xffffffff)
orr \_reg_l, \_reg_l, #(\_val & 0xffffffff)
.endif
.endm
/*
* Helper macro to bitwise-clear bits in `_reg_l` and
* `_reg_h` given a 64 bit immediate `_val`. The set bits
* in the bottom word of `_val` dictate which bits from
* `_reg_l` should be cleared. Similarly, the set bits in
* the top word of `_val` dictate which bits from `_reg_h`
* should be cleared. If either the bottom or top word of
* `_val` is zero, the corresponding BIC operation is skipped.
*/
.macro bic64_imm _reg_l, _reg_h, _val
.if (\_val >> 32)
bic \_reg_h, \_reg_h, #(\_val >> 32)
.endif
.if (\_val & 0xffffffff)
bic \_reg_l, \_reg_l, #(\_val & 0xffffffff)
.endif
.endm
#endif /* __ASM_MACROS_S__ */
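Because the `.if` directives are evaluated at assembly time, each macro emits at most two instructions. A minimal usage sketch (illustrative only, with a hypothetical bit-38 constant):

    /* Bit 38 of a 64-bit value is bit 6 of the top word, so only the
     * instruction on the high register is emitted; the OR/BIC on the
     * low register is skipped because the bottom word is zero.
     */
    orr64_imm r0, r1, (1 << 38)    /* emits: orr r1, r1, #0x40 */
    bic64_imm r0, r1, (1 << 38)    /* emits: bic r1, r1, #0x40 */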


@@ -400,6 +400,7 @@
#define CLIDR p15, 1, c0, c0, 1
#define CSSELR p15, 2, c0, c0, 0
#define CCSIDR p15, 1, c0, c0, 0
#define DBGOSDLR p14, 0, c1, c3, 4
/* Debug register defines. The format is: coproc, opc1, CRn, CRm, opc2 */
#define HDCR p15, 4, c1, c1, 1
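This five-field tuple plugs straight into the ldcopr/stcopr wrapper macros from asm_macros.S, which is how the CPU files below use it. A minimal sketch of a write to the new register (illustrative only):

    /* Set the OS Double Lock to quiesce the external debug interface. */
    mov r0, #1
    stcopr r0, DBGOSDLR    @ expands to: mcr p14, 0, r0, c1, c3, 4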


@@ -0,0 +1,92 @@
/*
* Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of ARM nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef __CORTEX_A53_H__
#define __CORTEX_A53_H__
/* Cortex-A53 MIDR for revision r0p0 */
#define CORTEX_A53_MIDR 0x410FD030
/* Retention timer tick definitions */
#define RETENTION_ENTRY_TICKS_2 0x1
#define RETENTION_ENTRY_TICKS_8 0x2
#define RETENTION_ENTRY_TICKS_32 0x3
#define RETENTION_ENTRY_TICKS_64 0x4
#define RETENTION_ENTRY_TICKS_128 0x5
#define RETENTION_ENTRY_TICKS_256 0x6
#define RETENTION_ENTRY_TICKS_512 0x7
/*******************************************************************************
* CPU Extended Control register specific definitions.
******************************************************************************/
#define CPUECTLR p15, 1, c15 /* Instruction def. */
#define CPUECTLR_SMP_BIT (1 << 6)
#define CPUECTLR_CPU_RET_CTRL_SHIFT 0
#define CPUECTLR_CPU_RET_CTRL_MASK (0x7 << CPUECTLR_CPU_RET_CTRL_SHIFT)
#define CPUECTLR_FPU_RET_CTRL_SHIFT 3
#define CPUECTLR_FPU_RET_CTRL_MASK (0x7 << CPUECTLR_FPU_RET_CTRL_SHIFT)
/*******************************************************************************
* CPU Memory Error Syndrome register specific definitions.
******************************************************************************/
#define CPUMERRSR p15, 2, c15 /* Instruction def. */
/*******************************************************************************
* CPU Auxiliary Control register specific definitions.
******************************************************************************/
#define CPUACTLR p15, 0, c15 /* Instruction def. */
#define CPUACTLR_DTAH (1 << 24)
/*******************************************************************************
* L2 Auxiliary Control register specific definitions.
******************************************************************************/
#define L2ACTLR p15, 1, c15, c0, 0 /* Instruction def. */
#define L2ACTLR_ENABLE_UNIQUECLEAN (1 << 14)
#define L2ACTLR_DISABLE_CLEAN_PUSH (1 << 3)
/*******************************************************************************
* L2 Extended Control register specific definitions.
******************************************************************************/
#define L2ECTLR p15, 1, c9, c0, 3 /* Instruction def. */
#define L2ECTLR_RET_CTRL_SHIFT 0
#define L2ECTLR_RET_CTRL_MASK (0x7 << L2ECTLR_RET_CTRL_SHIFT)
/*******************************************************************************
* L2 Memory Error Syndrome register specific definitions.
******************************************************************************/
#define L2MERRSR p15, 3, c15 /* Instruction def. */
#endif /* __CORTEX_A53_H__ */
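As a usage sketch (hypothetical, not part of the patch): CPUECTLR is a 64-bit register in AArch32, so it is accessed through the MRRC/MCRR-based ldcopr16/stcopr16 wrappers together with the orr64_imm/bic64_imm helpers added above, for example to program the retention timer:

    /* Hypothetical: request CPU retention entry after 32 ticks. */
    ldcopr16 r0, r1, CPUECTLR
    bic64_imm r0, r1, CPUECTLR_CPU_RET_CTRL_MASK
    orr64_imm r0, r1, (RETENTION_ENTRY_TICKS_32 << CPUECTLR_CPU_RET_CTRL_SHIFT)
    stcopr16 r0, r1, CPUECTLR
    isb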


@@ -0,0 +1,103 @@
/*
* Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of ARM nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef __CORTEX_A57_H__
#define __CORTEX_A57_H__
/* Cortex-A57 MIDR for revision r0p0 */
#define CORTEX_A57_MIDR 0x410FD070
/* Retention timer tick definitions */
#define RETENTION_ENTRY_TICKS_2 0x1
#define RETENTION_ENTRY_TICKS_8 0x2
#define RETENTION_ENTRY_TICKS_32 0x3
#define RETENTION_ENTRY_TICKS_64 0x4
#define RETENTION_ENTRY_TICKS_128 0x5
#define RETENTION_ENTRY_TICKS_256 0x6
#define RETENTION_ENTRY_TICKS_512 0x7
/*******************************************************************************
* CPU Extended Control register specific definitions.
******************************************************************************/
#define CPUECTLR p15, 1, c15 /* Instruction def. */
#define CPUECTLR_SMP_BIT (1 << 6)
#define CPUECTLR_DIS_TWD_ACC_PFTCH_BIT (1 << 38)
#define CPUECTLR_L2_IPFTCH_DIST_MASK (0x3 << 35)
#define CPUECTLR_L2_DPFTCH_DIST_MASK (0x3 << 32)
#define CPUECTLR_CPU_RET_CTRL_SHIFT 0
#define CPUECTLR_CPU_RET_CTRL_MASK (0x7 << CPUECTLR_CPU_RET_CTRL_SHIFT)
/*******************************************************************************
* CPU Memory Error Syndrome register specific definitions.
******************************************************************************/
#define CPUMERRSR p15, 2, c15 /* Instruction def. */
/*******************************************************************************
* CPU Auxiliary Control register specific definitions.
******************************************************************************/
#define CPUACTLR p15, 0, c15 /* Instruction def. */
#define CPUACTLR_DIS_LOAD_PASS_DMB (1 << 59)
#define CPUACTLR_GRE_NGRE_AS_NGNRE (1 << 54)
#define CPUACTLR_DIS_OVERREAD (1 << 52)
#define CPUACTLR_NO_ALLOC_WBWA (1 << 49)
#define CPUACTLR_DCC_AS_DCCI (1 << 44)
#define CPUACTLR_FORCE_FPSCR_FLUSH (1 << 38)
#define CPUACTLR_DIS_STREAMING (3 << 27)
#define CPUACTLR_DIS_L1_STREAMING (3 << 25)
#define CPUACTLR_DIS_INDIRECT_PREDICTOR (1 << 4)
/*******************************************************************************
* L2 Control register specific definitions.
******************************************************************************/
#define L2CTLR p15, 1, c9, c0, 2 /* Instruction def. */
#define L2CTLR_DATA_RAM_LATENCY_SHIFT 0
#define L2CTLR_TAG_RAM_LATENCY_SHIFT 6
#define L2_DATA_RAM_LATENCY_3_CYCLES 0x2
#define L2_TAG_RAM_LATENCY_3_CYCLES 0x2
/*******************************************************************************
* L2 Extended Control register specific definitions.
******************************************************************************/
#define L2ECTLR p15, 1, c9, c0, 3 /* Instruction def. */
#define L2ECTLR_RET_CTRL_SHIFT 0
#define L2ECTLR_RET_CTRL_MASK (0x7 << L2ECTLR_RET_CTRL_SHIFT)
/*******************************************************************************
* L2 Memory Error Syndrome register specific definitions.
******************************************************************************/
#define L2MERRSR p15, 3, c15 /* Instruction def. */
#endif /* __CORTEX_A57_H__ */
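For illustration, the L2CTLR latency fields would be programmed as below (a sketch assuming the fields are still at their reset value of zero; otherwise they would need clearing first). L2CTLR is a 32-bit register, so the plain MRC/MCR wrappers apply, typically from early platform reset code:

    /* Hypothetical: configure 3-cycle L2 data and tag RAM latencies. */
    ldcopr r0, L2CTLR
    orr r0, r0, #(L2_DATA_RAM_LATENCY_3_CYCLES << L2CTLR_DATA_RAM_LATENCY_SHIFT)
    orr r0, r0, #(L2_TAG_RAM_LATENCY_3_CYCLES << L2CTLR_TAG_RAM_LATENCY_SHIFT)
    stcopr r0, L2CTLR
    isb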


@@ -0,0 +1,78 @@
/*
* Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of ARM nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef __CORTEX_A72_H__
#define __CORTEX_A72_H__
/* Cortex-A72 MIDR for revision r0p0 */
#define CORTEX_A72_MIDR 0x410FD080
/*******************************************************************************
* CPU Extended Control register specific definitions.
******************************************************************************/
#define CPUECTLR p15, 1, c15 /* Instruction def. */
#define CPUECTLR_SMP_BIT (1 << 6)
#define CPUECTLR_DIS_TWD_ACC_PFTCH_BIT (1 << 38)
#define CPUECTLR_L2_IPFTCH_DIST_MASK (0x3 << 35)
#define CPUECTLR_L2_DPFTCH_DIST_MASK (0x3 << 32)
/*******************************************************************************
* CPU Memory Error Syndrome register specific definitions.
******************************************************************************/
#define CPUMERRSR p15, 2, c15 /* Instruction def. */
/*******************************************************************************
* CPU Auxiliary Control register specific definitions.
******************************************************************************/
#define CPUACTLR p15, 0, c15 /* Instruction def. */
#define CPUACTLR_DISABLE_L1_DCACHE_HW_PFTCH (1 << 56)
#define CPUACTLR_NO_ALLOC_WBWA (1 << 49)
#define CPUACTLR_DCC_AS_DCCI (1 << 44)
/*******************************************************************************
* L2 Control register specific definitions.
******************************************************************************/
#define L2CTLR p15, 1, c9, c0, 2 /* Instruction def. */
#define L2CTLR_DATA_RAM_LATENCY_SHIFT 0
#define L2CTLR_TAG_RAM_LATENCY_SHIFT 6
#define L2_DATA_RAM_LATENCY_3_CYCLES 0x2
#define L2_TAG_RAM_LATENCY_2_CYCLES 0x1
#define L2_TAG_RAM_LATENCY_3_CYCLES 0x2
/*******************************************************************************
* L2 Memory Error Syndrome register specific definitions.
******************************************************************************/
#define L2MERRSR p15, 3, c15 /* Instruction def. */
#endif /* __CORTEX_A72_H__ */
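A small sketch of how a MIDR constant like this is typically consumed (illustrative only; assumes the MIDR accessor tuple defined in arch.h):

    /* Hypothetical: check whether the executing core is any Cortex-A72
     * revision. Variant (bits [23:20]) and revision (bits [3:0]) are
     * masked out so the match is not limited to r0p0.
     */
    ldcopr r0, MIDR
    ldr r1, =0xFF0FFFF0
    and r0, r0, r1
    ldr r2, =(CORTEX_A72_MIDR & 0xFF0FFFF0)
    cmp r0, r2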


@@ -0,0 +1,141 @@
/*
* Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of ARM nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <cortex_a53.h>
#include <cpu_macros.S>
#include <debug.h>
/* ---------------------------------------------
* Disable intra-cluster coherency
* Clobbers: r0-r1
* ---------------------------------------------
*/
func cortex_a53_disable_smp
ldcopr16 r0, r1, CPUECTLR
bic64_imm r0, r1, CPUECTLR_SMP_BIT
stcopr16 r0, r1, CPUECTLR
isb
dsb sy
bx lr
endfunc cortex_a53_disable_smp
/* -------------------------------------------------
* The CPU Ops reset function for Cortex-A53.
* -------------------------------------------------
*/
func cortex_a53_reset_func
/* ---------------------------------------------
* Enable the SMP bit.
* ---------------------------------------------
*/
ldcopr16 r0, r1, CPUECTLR
orr64_imm r0, r1, CPUECTLR_SMP_BIT
stcopr16 r0, r1, CPUECTLR
isb
bx lr
endfunc cortex_a53_reset_func
/* ----------------------------------------------------
* The CPU Ops core power down function for Cortex-A53.
* ----------------------------------------------------
*/
func cortex_a53_core_pwr_dwn
push {r12, lr}
/* Assert if cache is enabled */
#if ASM_ASSERTION
ldcopr r0, SCTLR
tst r0, #SCTLR_C_BIT
ASM_ASSERT(eq)
#endif
/* ---------------------------------------------
* Flush L1 caches.
* ---------------------------------------------
*/
mov r0, #DC_OP_CISW
bl dcsw_op_level1
/* ---------------------------------------------
* Come out of intra cluster coherency
* ---------------------------------------------
*/
pop {r12, lr}
b cortex_a53_disable_smp
endfunc cortex_a53_core_pwr_dwn
/* -------------------------------------------------------
* The CPU Ops cluster power down function for Cortex-A53.
* Clobbers: r0-r3
* -------------------------------------------------------
*/
func cortex_a53_cluster_pwr_dwn
push {r12, lr}
/* Assert if cache is enabled */
#if ASM_ASSERTION
ldcopr r0, SCTLR
tst r0, #SCTLR_C_BIT
ASM_ASSERT(eq)
#endif
/* ---------------------------------------------
* Flush L1 caches.
* ---------------------------------------------
*/
mov r0, #DC_OP_CISW
bl dcsw_op_level1
/* ---------------------------------------------
* Disable the optional ACP.
* ---------------------------------------------
*/
bl plat_disable_acp
/* ---------------------------------------------
* Flush L2 caches.
* ---------------------------------------------
*/
mov r0, #DC_OP_CISW
bl dcsw_op_level2
/* ---------------------------------------------
* Come out of intra cluster coherency
* ---------------------------------------------
*/
pop {r12, lr}
b cortex_a53_disable_smp
endfunc cortex_a53_cluster_pwr_dwn
declare_cpu_ops cortex_a53, CORTEX_A53_MIDR, \
cortex_a53_reset_func, \
cortex_a53_core_pwr_dwn, \
cortex_a53_cluster_pwr_dwn
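declare_cpu_ops (from cpu_macros.S) binds the MIDR to the three handlers by emitting a descriptor into the dedicated cpu_ops linker section. Conceptually it produces something like the following (a sketch, not the literal macro expansion):

    .section cpu_ops, "a"
    .align 2
cpu_ops_cortex_a53:
    .word CORTEX_A53_MIDR             @ MIDR used to match this core
    .word cortex_a53_reset_func       @ reset-time setup handler
    .word cortex_a53_core_pwr_dwn     @ per-core power-down handler
    .word cortex_a53_cluster_pwr_dwn  @ cluster power-down handler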


@@ -0,0 +1,192 @@
/*
* Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of ARM nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <cortex_a57.h>
#include <cpu_macros.S>
#include <debug.h>
/* ---------------------------------------------
* Disable intra-cluster coherency
* Clobbers: r0-r1
* ---------------------------------------------
*/
func cortex_a57_disable_smp
ldcopr16 r0, r1, CPUECTLR
bic64_imm r0, r1, CPUECTLR_SMP_BIT
stcopr16 r0, r1, CPUECTLR
bx lr
endfunc cortex_a57_disable_smp
/* ---------------------------------------------
* Disable all types of L2 prefetches.
* Clobbers: r0-r2
* ---------------------------------------------
*/
func cortex_a57_disable_l2_prefetch
ldcopr16 r0, r1, CPUECTLR
orr64_imm r0, r1, CPUECTLR_DIS_TWD_ACC_PFTCH_BIT
bic64_imm r0, r1, (CPUECTLR_L2_IPFTCH_DIST_MASK | \
CPUECTLR_L2_DPFTCH_DIST_MASK)
stcopr16 r0, r1, CPUECTLR
isb
dsb ish
bx lr
endfunc cortex_a57_disable_l2_prefetch
/* ---------------------------------------------
* Disable debug interfaces
* ---------------------------------------------
*/
func cortex_a57_disable_ext_debug
mov r0, #1
stcopr r0, DBGOSDLR
isb
dsb sy
bx lr
endfunc cortex_a57_disable_ext_debug
/* -------------------------------------------------
* The CPU Ops reset function for Cortex-A57.
* -------------------------------------------------
*/
func cortex_a57_reset_func
/* ---------------------------------------------
* Enable the SMP bit.
* ---------------------------------------------
*/
ldcopr16 r0, r1, CPUECTLR
orr64_imm r0, r1, CPUECTLR_SMP_BIT
stcopr16 r0, r1, CPUECTLR
isb
bx lr
endfunc cortex_a57_reset_func
/* ----------------------------------------------------
* The CPU Ops core power down function for Cortex-A57.
* ----------------------------------------------------
*/
func cortex_a57_core_pwr_dwn
push {r12, lr}
/* Assert if cache is enabled */
#if ASM_ASSERTION
ldcopr r0, SCTLR
tst r0, #SCTLR_C_BIT
ASM_ASSERT(eq)
#endif
/* ---------------------------------------------
* Disable the L2 prefetches.
* ---------------------------------------------
*/
bl cortex_a57_disable_l2_prefetch
/* ---------------------------------------------
* Flush L1 caches.
* ---------------------------------------------
*/
mov r0, #DC_OP_CISW
bl dcsw_op_level1
/* ---------------------------------------------
* Come out of intra cluster coherency
* ---------------------------------------------
*/
bl cortex_a57_disable_smp
/* ---------------------------------------------
* Force the debug interfaces to be quiescent
* ---------------------------------------------
*/
pop {r12, lr}
b cortex_a57_disable_ext_debug
endfunc cortex_a57_core_pwr_dwn
/* -------------------------------------------------------
* The CPU Ops cluster power down function for Cortex-A57.
* Clobbers: r0-r3
* -------------------------------------------------------
*/
func cortex_a57_cluster_pwr_dwn
push {r12, lr}
/* Assert if cache is enabled */
#if ASM_ASSERTION
ldcopr r0, SCTLR
tst r0, #SCTLR_C_BIT
ASM_ASSERT(eq)
#endif
/* ---------------------------------------------
* Disable the L2 prefetches.
* ---------------------------------------------
*/
bl cortex_a57_disable_l2_prefetch
/* ---------------------------------------------
* Flush L1 caches.
* ---------------------------------------------
*/
mov r0, #DC_OP_CISW
bl dcsw_op_level1
/* ---------------------------------------------
* Disable the optional ACP.
* ---------------------------------------------
*/
bl plat_disable_acp
/* ---------------------------------------------
* Flush L2 caches.
* ---------------------------------------------
*/
mov r0, #DC_OP_CISW
bl dcsw_op_level2
/* ---------------------------------------------
* Come out of intra cluster coherency
* ---------------------------------------------
*/
bl cortex_a57_disable_smp
/* ---------------------------------------------
* Force the debug interfaces to be quiescent
* ---------------------------------------------
*/
pop {r12, lr}
b cortex_a57_disable_ext_debug
endfunc cortex_a57_cluster_pwr_dwn
declare_cpu_ops cortex_a57, CORTEX_A57_MIDR, \
cortex_a57_reset_func, \
cortex_a57_core_pwr_dwn, \
cortex_a57_cluster_pwr_dwn
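One pattern worth noting in these power-down routines (helper names below are hypothetical): lr is saved with a dummy r12 push so the stack stays 8-byte aligned as the AAPCS requires, and the final helper is reached by a plain branch after the pop, so its bx lr returns directly to the original caller:

    push {r12, lr}   @ r12 is only padding for 8-byte stack alignment
    bl helper_a      @ intermediate helper; bl clobbers lr
    pop {r12, lr}    @ restore the caller's return address
    b helper_b       @ tail call: helper_b's bx lr returns to the caller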


@@ -0,0 +1,216 @@
/*
* Copyright (c) 2017, ARM Limited and Contributors. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of ARM nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <cortex_a72.h>
#include <cpu_macros.S>
#include <debug.h>
/* ---------------------------------------------
* Disable all types of L2 prefetches.
* ---------------------------------------------
*/
func cortex_a72_disable_l2_prefetch
ldcopr16 r0, r1, CPUECTLR
orr64_imm r0, r1, CPUECTLR_DIS_TWD_ACC_PFTCH_BIT
bic64_imm r0, r1, (CPUECTLR_L2_IPFTCH_DIST_MASK | \
CPUECTLR_L2_DPFTCH_DIST_MASK)
stcopr16 r0, r1, CPUECTLR
isb
bx lr
endfunc cortex_a72_disable_l2_prefetch
/* ---------------------------------------------
* Disable the load-store hardware prefetcher.
* ---------------------------------------------
*/
func cortex_a72_disable_hw_prefetcher
ldcopr16 r0, r1, CPUACTLR
orr64_imm r0, r1, CPUACTLR_DISABLE_L1_DCACHE_HW_PFTCH
stcopr16 r0, r1, CPUACTLR
isb
dsb ish
bx lr
endfunc cortex_a72_disable_hw_prefetcher
/* ---------------------------------------------
* Disable intra-cluster coherency
* Clobbers: r0-r1
* ---------------------------------------------
*/
func cortex_a72_disable_smp
ldcopr16 r0, r1, CPUECTLR
bic64_imm r0, r1, CPUECTLR_SMP_BIT
stcopr16 r0, r1, CPUECTLR
bx lr
endfunc cortex_a72_disable_smp
/* ---------------------------------------------
* Disable debug interfaces
* ---------------------------------------------
*/
func cortex_a72_disable_ext_debug
mov r0, #1
stcopr r0, DBGOSDLR
isb
dsb sy
bx lr
endfunc cortex_a72_disable_ext_debug
/* -------------------------------------------------
* The CPU Ops reset function for Cortex-A72.
* -------------------------------------------------
*/
func cortex_a72_reset_func
/* ---------------------------------------------
* Enable the SMP bit.
* ---------------------------------------------
*/
ldcopr16 r0, r1, CPUECTLR
orr64_imm r0, r1, CPUECTLR_SMP_BIT
stcopr16 r0, r1, CPUECTLR
isb
bx lr
endfunc cortex_a72_reset_func
/* ----------------------------------------------------
* The CPU Ops core power down function for Cortex-A72.
* ----------------------------------------------------
*/
func cortex_a72_core_pwr_dwn
push {r12, lr}
/* Assert if cache is enabled */
#if ASM_ASSERTION
ldcopr r0, SCTLR
tst r0, #SCTLR_C_BIT
ASM_ASSERT(eq)
#endif
/* ---------------------------------------------
* Disable the L2 prefetches.
* ---------------------------------------------
*/
bl cortex_a72_disable_l2_prefetch
/* ---------------------------------------------
* Disable the load-store hardware prefetcher.
* ---------------------------------------------
*/
bl cortex_a72_disable_hw_prefetcher
/* ---------------------------------------------
* Flush L1 caches.
* ---------------------------------------------
*/
mov r0, #DC_OP_CISW
bl dcsw_op_level1
/* ---------------------------------------------
* Come out of intra cluster coherency
* ---------------------------------------------
*/
bl cortex_a72_disable_smp
/* ---------------------------------------------
* Force the debug interfaces to be quiescent
* ---------------------------------------------
*/
pop {r12, lr}
b cortex_a72_disable_ext_debug
endfunc cortex_a72_core_pwr_dwn
/* -------------------------------------------------------
* The CPU Ops cluster power down function for Cortex-A72.
* -------------------------------------------------------
*/
func cortex_a72_cluster_pwr_dwn
push {r12, lr}
/* Assert if cache is enabled */
#if ASM_ASSERTION
ldcopr r0, SCTLR
tst r0, #SCTLR_C_BIT
ASM_ASSERT(eq)
#endif
/* ---------------------------------------------
* Disable the L2 prefetches.
* ---------------------------------------------
*/
bl cortex_a72_disable_l2_prefetch
/* ---------------------------------------------
* Disable the load-store hardware prefetcher.
* ---------------------------------------------
*/
bl cortex_a72_disable_hw_prefetcher
#if !SKIP_A72_L1_FLUSH_PWR_DWN
/* ---------------------------------------------
* Flush L1 caches.
* ---------------------------------------------
*/
mov r0, #DC_OP_CISW
bl dcsw_op_level1
#endif
/* ---------------------------------------------
* Disable the optional ACP.
* ---------------------------------------------
*/
bl plat_disable_acp
/* -------------------------------------------------
* Flush the L2 caches.
* -------------------------------------------------
*/
mov r0, #DC_OP_CISW
bl dcsw_op_level2
/* ---------------------------------------------
* Come out of intra cluster coherency
* ---------------------------------------------
*/
bl cortex_a72_disable_smp
/* ---------------------------------------------
* Force the debug interfaces to be quiescent
* ---------------------------------------------
*/
pop {r12, lr}
b cortex_a72_disable_ext_debug
endfunc cortex_a72_cluster_pwr_dwn
declare_cpu_ops cortex_a72, CORTEX_A72_MIDR, \
cortex_a72_reset_func, \
cortex_a72_core_pwr_dwn, \
cortex_a72_cluster_pwr_dwn
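For context on how these descriptors are consumed: the cpu_ops framework walks the cpu_ops section at runtime and matches on MIDR. A conceptual sketch only (the real lookup lives in the framework's helpers; __CPU_OPS_START__ comes from the linker script, and the entry size and mask here are assumptions of this sketch):

    ldcopr r0, MIDR
    ldr r1, =0xFF00FFF0          @ keep implementer and part number only
    and r0, r0, r1
    ldr r2, =__CPU_OPS_START__
1:  ldr r3, [r2]                 @ midr field of this cpu_ops entry
    and r3, r3, r1
    cmp r3, r0
    beq 2f                       @ found the matching descriptor
    add r2, r2, #16              @ sizeof(one entry) in this sketch
    b 1b
2:                               @ r2 now points at the matched entry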