/* SPDX-License-Identifier: BSD-3-Clause */
/* Copyright (c) 2020 Linaro Limited */
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

/*
 * Content of LICENSE file mentioned above:
Copyright 2019 The Fuchsia Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
   * Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
   * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
   * Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef FBL_CONFINE_ARRAY_INDEX_H_
#define FBL_CONFINE_ARRAY_INDEX_H_

#include <stddef.h>

// confine_array_index() bounds-checks and sanitizes an array index safely in the presence of
// speculative execution information leak bugs such as Spectre V1. confine_array_index() always
// returns a sanitized index, even on speculatively executed paths.
//
// Callers need to combine confine_array_index() with a conventional bounds check: the bounds
// check returns any necessary error in the non-speculative path, while confine_array_index()
// confines the index in the speculative path.
//
// Use:
// confine_array_index() returns |index| if it is < size, or 0 if |index| is >= size.
//
// Example (may leak table1 contents):
//  1: int lookup3(size_t index) {
//  2:   if (index >= table1_size) {
//  3:     return -1;
//  4:   }
//  5:   size_t index2 = table1[index];
//  6:   return table2[index2];
//  7: }
//
// Converted:
//
//  1: int lookup3(size_t index) {
//  2:   if (index >= table1_size) {
//  3:     return -1;
//  4:   }
//  5:   size_t safe_index = confine_array_index(index, table1_size);
//  6:   size_t index2 = table1[safe_index];
//  7:   return table2[index2];
//  8: }
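//
// For reference, the effect is the same as the plain-C expression below. This is only an
// illustration of the semantics: a plain C ternary offers no speculation guarantee (a compiler
// may lower it to a predictable branch), which is why the arch-specific conditional-select/move
// plus barrier sequences below are used instead. |confined| is just an illustrative name:
//
//   size_t confined = (index < size) ? index : 0;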
#ifdef __aarch64__
static inline size_t confine_array_index(size_t index, size_t size) {
  size_t safe_index;
  // Use a conditional select and a CSDB barrier to enforce validation of |index|.
  // See the "Cache Speculation Side-channels" whitepaper, section "Software Mitigation":
  // "The combination of both a conditional select/conditional move and the new barrier are
  // sufficient to address this problem on ALL Arm implementations..."
  asm(
    "cmp %1, %2\n"  // %1 holds the unsanitized index
    "csel %0, %1, xzr, lo\n"  // Select index or zero: lo (carry clear) means index < size
    "hint #20\n"  // csdb barrier (CSDB is encoded as HINT #20)
  : "=r"(safe_index)
  : "r"(index), "r"(size)
  : "cc");
  return safe_index;
}
#endif
#ifdef __arm__
static inline size_t confine_array_index(size_t index, size_t size)
{
	size_t ret_val = index;

	/*
	 * For the ARMv7/AArch32 case we're basing the select and barrier
	 * code on __load_no_speculate1() in <speculation_barrier.h> as we
	 * lack the csel instruction.
	 */

#ifdef __thumb2__
	asm volatile (
	".syntax unified\n"
	"cmp	%0, %1\n"	/* %0 holds the unsanitized index */
	"it	cs\n"
#ifdef __clang__
#pragma clang diagnostic push
	/* Avoid 'deprecated instruction in IT block [-Werror,-Winline-asm]' */
#pragma clang diagnostic ignored "-Winline-asm"
#endif
	"movcs	%0, #0\n"	/* Zero the index when index >= size (carry set) */
#ifdef __clang__
#pragma clang diagnostic pop
#endif
	".inst.n 0xf3af\t@ CSDB\n"	/* T32 CSDB encoding 0xf3af8014, */
	".inst.n 0x8014\t@ CSDB"	/* emitted as two halfwords */
	: "+r" (ret_val) : "r" (size) : "cc");
#else
	asm volatile (
	".syntax unified\n"
	"cmp	%0, %1\n"	/* %0 holds the unsanitized index */
	"movcs	%0, #0\n"	/* Zero the index when index >= size (carry set) */
	".inst	0xe320f014\t@ CSDB"	/* A32 CSDB encoding */
	: "+r" (ret_val) : "r" (size) : "cc");
#endif

	return ret_val;
}
#endif /* __arm__ */

#ifdef __x86_64__
static inline size_t confine_array_index(size_t index, size_t size) {
  size_t safe_index = 0;
  // Use a conditional move to enforce validation of |index|.
  // The conditional move has a data dependency on the result of a comparison and cannot
  // execute until the comparison is resolved.
  // See "Software Techniques for Managing Speculation on AMD Processors", Mitigation V1-2.
  // See "Analyzing potential bounds check bypass vulnerabilities", Revision 002,
  //   Section 5.2 Bounds clipping.
  __asm__(
    "cmp %1, %2\n"    // AT&T operand order: sets flags from %2 - %1, i.e. size - index
    "cmova %1, %0\n"  // If size > index (unsigned), copy |index| into %0, else keep 0
  : "+r"(safe_index)
  : "r"(index), "r"(size)
  : "cc");
  return safe_index;
}
#endif
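
// A minimal usage sketch (illustrative only: |table|, |table_size|, |user_idx| and read_entry()
// are hypothetical names, not part of this header). The conventional bounds check still handles
// the architectural error path; confine_array_index() only confines the speculative path:
//
//   int read_entry(size_t user_idx, int *out) {
//     if (user_idx >= table_size)
//       return -1;  /* architectural bounds check */
//     *out = table[confine_array_index(user_idx, table_size)];  /* speculation-safe access */
//     return 0;
//   }
//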
#endif  // FBL_CONFINE_ARRAY_INDEX_H_