// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Altivec XOR operations
 *
 * Copyright 2017 IBM Corp.
 */

#include <linux/preempt.h>
#include <linux/export.h>
#include <linux/sched.h>
#include <asm/switch_to.h>
#include <asm/xor_altivec.h>
#include "xor_vmx.h"

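/*
 * Each wrapper below follows the same pattern: preemption is disabled so
 * the task cannot be scheduled away while the kernel is using the vector
 * unit, enable_kernel_altivec() grants kernel-mode use of the VMX
 * registers, and the raw __xor_altivec_N() routine (declared in
 * xor_vmx.h) performs the actual vector XOR before Altivec use is
 * disabled and preemption is re-enabled.
 */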
void xor_altivec_2(unsigned long bytes, unsigned long *v1_in,
		   unsigned long *v2_in)
{
	preempt_disable();
	enable_kernel_altivec();
	__xor_altivec_2(bytes, v1_in, v2_in);
	disable_kernel_altivec();
	preempt_enable();
}
EXPORT_SYMBOL(xor_altivec_2);

void xor_altivec_3(unsigned long bytes, unsigned long *v1_in,
		   unsigned long *v2_in, unsigned long *v3_in)
{
	preempt_disable();
	enable_kernel_altivec();
	__xor_altivec_3(bytes, v1_in, v2_in, v3_in);
	disable_kernel_altivec();
	preempt_enable();
}
EXPORT_SYMBOL(xor_altivec_3);

void xor_altivec_4(unsigned long bytes, unsigned long *v1_in,
		   unsigned long *v2_in, unsigned long *v3_in,
		   unsigned long *v4_in)
{
	preempt_disable();
	enable_kernel_altivec();
	__xor_altivec_4(bytes, v1_in, v2_in, v3_in, v4_in);
	disable_kernel_altivec();
	preempt_enable();
}
EXPORT_SYMBOL(xor_altivec_4);

void xor_altivec_5(unsigned long bytes, unsigned long *v1_in,
		   unsigned long *v2_in, unsigned long *v3_in,
		   unsigned long *v4_in, unsigned long *v5_in)
{
	preempt_disable();
	enable_kernel_altivec();
	__xor_altivec_5(bytes, v1_in, v2_in, v3_in, v4_in, v5_in);
	disable_kernel_altivec();
	preempt_enable();
}
EXPORT_SYMBOL(xor_altivec_5);