xref: /linux/arch/arm64/kernel/entry-fpsimd.S (revision a36e9f5cfe9eb3a1dce8769c7058251c42705357)
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * FP/SIMD state saving and restoring
 *
 * Copyright (C) 2012 ARM Ltd.
 * Author: Catalin Marinas <catalin.marinas@arm.com>
 */

#include <linux/linkage.h>

#include <asm/assembler.h>
#include <asm/fpsimdmacros.h>

/*
 * Save the FP registers.
 *
 * x0 - pointer to struct user_fpsimd_state
 */
SYM_FUNC_START(fpsimd_save_state)
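	// The second operand names x8 as the scratch register that the
	// fpsimd_save macro (fpsimdmacros.h) uses to stage FPSR/FPCR.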
	fpsimd_save x0, 8
	ret
SYM_FUNC_END(fpsimd_save_state)

/*
 * Load the FP registers.
 *
 * x0 - pointer to struct user_fpsimd_state
 */
SYM_FUNC_START(fpsimd_load_state)
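	// As in fpsimd_save_state, the second operand names x8 as the
	// macro's FPSR/FPCR scratch register.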
	fpsimd_restore x0, 8
	ret
SYM_FUNC_END(fpsimd_load_state)

#ifdef CONFIG_ARM64_SVE

/*
 * Save the SVE state
 *
 * x0 - pointer to buffer for state
 * x1 - pointer to storage for FPSR
 * x2 - Save FFR if non-zero
 */
SYM_FUNC_START(sve_save_state)
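	// The bare operands are register numbers for the sve_save macro:
	// 0 is the state buffer pointer (x0), 3 its scratch register (x3).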
	sve_save 0, x1, x2, 3
	ret
SYM_FUNC_END(sve_save_state)

/*
 * Load the SVE state
 *
 * x0 - pointer to buffer for state
 * x1 - pointer to storage for FPSR
 * x2 - Restore FFR if non-zero
 */
SYM_FUNC_START(sve_load_state)
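	// As with sve_save_state: register number 0 is the state buffer
	// pointer (x0), 4 the macro's scratch register (x4).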
	sve_load 0, x1, x2, 4
	ret
SYM_FUNC_END(sve_load_state)

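/*
 * Return the current SVE vector length, in bytes, in x0.
 */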
SYM_FUNC_START(sve_get_vl)
	_sve_rdvl	0, 1
	ret
SYM_FUNC_END(sve_get_vl)

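/*
 * Set the SVE vector length.
 *
 * x0 - requested vector length, expressed as VQ - 1
 *
 * x1 and x2 are clobbered as scratch.
 */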
SYM_FUNC_START(sve_set_vq)
	sve_load_vq x0, x1, x2
	ret
SYM_FUNC_END(sve_set_vq)

/*
 * Zero all SVE registers but the first 128 bits of each vector
 *
 * VQ must already be configured by the caller; any further update of VQ
 * must ensure that the register state remains valid.
 *
 * x0 = include FFR?
 * x1 = VQ - 1
 */
SYM_FUNC_START(sve_flush_live)
	cbz		x1, 1f	// A VQ-1 of 0 is 128 bits, so there is no extra Z state
	sve_flush_z
1:	sve_flush_p
	tbz		x0, #0, 2f	// Flush FFR only if bit 0 of x0 is set
	sve_flush_ffr
2:	ret
SYM_FUNC_END(sve_flush_live)

#endif /* CONFIG_ARM64_SVE */

#ifdef CONFIG_ARM64_SME

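/*
 * Return the current SME streaming vector length, in bytes, in x0.
 */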
SYM_FUNC_START(sme_get_vl)
	_sme_rdsvl	0, 1
	ret
SYM_FUNC_END(sme_get_vl)

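/*
 * Set the SME streaming vector length.
 *
 * x0 - requested streaming vector length, expressed as VQ - 1
 *
 * x1 and x2 are clobbered as scratch.
 */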
SYM_FUNC_START(sme_set_vq)
	sme_load_vq x0, x1, x2
	ret
SYM_FUNC_END(sme_set_vq)

/*
 * Save the ZA and ZT state
 *
 * x0 - pointer to buffer for state
 * x1 - number of ZT registers to save
 */
SYM_FUNC_START(sme_save_state)
	_sme_rdsvl	2, 1		// x2 = VL/8
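	// x2 now holds SVL in bytes: ZA is x2 rows of x2 bytes each, and
	// sme_save_za (register number 0 = x0 base, w12 = row counter)
	// stores one horizontal slice per iteration.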
	sme_save_za 0, x2, 12		// Leaves x0 pointing to the end of ZA

	cbz	x1, 1f
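	// ZT0 (SME2) is a single 512-bit register, stored immediately
	// after the ZA image.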
	_str_zt 0
1:
	ret
SYM_FUNC_END(sme_save_state)

/*
 * Load the ZA and ZT state
 *
 * x0 - pointer to buffer for state
 * x1 - number of ZT registers to restore
 */
SYM_FUNC_START(sme_load_state)
	_sme_rdsvl	2, 1		// x2 = VL/8
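	// Mirrors sme_save_state: reload the ZA rows first, then ZT0 if
	// x1 is non-zero.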
	sme_load_za 0, x2, 12		// Leaves x0 pointing to the end of ZA

	cbz	x1, 1f
	_ldr_zt 0
1:
	ret
SYM_FUNC_END(sme_load_state)

#endif /* CONFIG_ARM64_SME */