Lines matching refs: map_flags

132 	return !(htab->map.map_flags & BPF_F_NO_PREALLOC);  in htab_is_prealloc()
338 htab->map.map_flags & BPF_F_NO_COMMON_LRU, in prealloc_init()
410 bool percpu_lru = (attr->map_flags & BPF_F_NO_COMMON_LRU); in htab_map_alloc_check()
411 bool prealloc = !(attr->map_flags & BPF_F_NO_PREALLOC); in htab_map_alloc_check()
412 bool zero_seed = (attr->map_flags & BPF_F_ZERO_SEED); in htab_map_alloc_check()
430 if (attr->map_flags & ~HTAB_CREATE_FLAG_MASK || in htab_map_alloc_check()
431 !bpf_map_flags_access_ok(attr->map_flags)) in htab_map_alloc_check()
473 bool percpu_lru = (attr->map_flags & BPF_F_NO_COMMON_LRU); in htab_map_alloc()
474 bool prealloc = !(attr->map_flags & BPF_F_NO_PREALLOC); in htab_map_alloc()
530 if (htab->map.map_flags & BPF_F_ZERO_SEED) in htab_map_alloc()
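
The creation-time checks above in htab_map_alloc_check() and htab_map_alloc() read attr->map_flags as supplied at map-creation time. A hedged userspace sketch, not part of the listing, assuming libbpf's bpf_map_create() wrapper; the map name, key/value sizes and max_entries are arbitrary illustration values:

/* Hedged sketch: create a hash map whose attr->map_flags feed the
 * htab_map_alloc_check()/htab_map_alloc() tests above. */
#include <stdio.h>
#include <linux/bpf.h>
#include <bpf/bpf.h>

int create_demo_htab(void)
{
	LIBBPF_OPTS(bpf_map_create_opts, opts,
		    /* !(map_flags & BPF_F_NO_PREALLOC) is what htab_is_prealloc()
		     * tests; BPF_F_NO_COMMON_LRU (LRU map types only) and
		     * BPF_F_ZERO_SEED (privileged) travel through the same field. */
		    .map_flags = BPF_F_NO_PREALLOC);
	int fd;

	fd = bpf_map_create(BPF_MAP_TYPE_HASH, "demo_htab",
			    sizeof(__u32), sizeof(__u64), 128, &opts);
	if (fd < 0)
		perror("bpf_map_create");
	return fd;
}
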
1005 u64 map_flags) in check_flags() argument
1007 if (l_old && (map_flags & ~BPF_F_LOCK) == BPF_NOEXIST) in check_flags()
1011 if (!l_old && (map_flags & ~BPF_F_LOCK) == BPF_EXIST) in check_flags()
1020 u64 map_flags) in htab_map_update_elem() argument
1030 if (unlikely((map_flags & ~BPF_F_LOCK) > BPF_EXIST)) in htab_map_update_elem()
1044 if (unlikely(map_flags & BPF_F_LOCK)) { in htab_map_update_elem()
1050 ret = check_flags(htab, l_old, map_flags); in htab_map_update_elem()
1072 ret = check_flags(htab, l_old, map_flags); in htab_map_update_elem()
1076 if (unlikely(l_old && (map_flags & BPF_F_LOCK))) { in htab_map_update_elem()
1122 u64 map_flags) in htab_lru_map_update_elem() argument
1132 if (unlikely(map_flags > BPF_EXIST)) in htab_lru_map_update_elem()
1163 ret = check_flags(htab, l_old, map_flags); in htab_lru_map_update_elem()
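
check_flags() above turns the BPF_NOEXIST/BPF_EXIST part of map_flags into exists/doesn't-exist decisions against the old element, with BPF_F_LOCK masked off first. A hedged sketch, not from the listing, of how those update flags arrive from userspace, assuming libbpf's bpf_map_update_elem() wrapper; map_fd, key and val are placeholders supplied by the caller:

/* Hedged sketch: exercising the flag semantics that check_flags() enforces. */
#include <errno.h>
#include <stdio.h>
#include <linux/bpf.h>
#include <bpf/bpf.h>

static int demo_update_flags(int map_fd, __u32 *key, __u64 *val)
{
	/* Create-only: check_flags() fails the update when an old element exists. */
	if (bpf_map_update_elem(map_fd, key, val, BPF_NOEXIST) && errno == EEXIST)
		fprintf(stderr, "key already present\n");

	/* Update-only: fails when no old element is found. */
	if (bpf_map_update_elem(map_fd, key, val, BPF_EXIST) && errno == ENOENT)
		fprintf(stderr, "key not present yet\n");

	/* BPF_F_LOCK may be OR-ed into the flags for values that embed a
	 * struct bpf_spin_lock (plain hash only; the LRU path above rejects
	 * anything beyond BPF_EXIST). */
	return bpf_map_update_elem(map_fd, key, val, BPF_ANY);
}
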
1189 void *value, u64 map_flags, in __htab_percpu_map_update_elem() argument
1200 if (unlikely(map_flags > BPF_EXIST)) in __htab_percpu_map_update_elem()
1220 ret = check_flags(htab, l_old, map_flags); in __htab_percpu_map_update_elem()
1244 void *value, u64 map_flags, in __htab_lru_percpu_map_update_elem() argument
1255 if (unlikely(map_flags > BPF_EXIST)) in __htab_lru_percpu_map_update_elem()
1274 if (map_flags != BPF_EXIST) { in __htab_lru_percpu_map_update_elem()
1286 ret = check_flags(htab, l_old, map_flags); in __htab_lru_percpu_map_update_elem()
1311 void *value, u64 map_flags) in htab_percpu_map_update_elem() argument
1313 return __htab_percpu_map_update_elem(map, key, value, map_flags, false); in htab_percpu_map_update_elem()
1317 void *value, u64 map_flags) in htab_lru_percpu_map_update_elem() argument
1319 return __htab_lru_percpu_map_update_elem(map, key, value, map_flags, in htab_lru_percpu_map_update_elem()
1599 u64 elem_map_flags, map_flags; in __htab_map_lookup_and_delete_batch() local
1613 map_flags = attr->batch.flags; in __htab_map_lookup_and_delete_batch()
1614 if (map_flags) in __htab_map_lookup_and_delete_batch()
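
__htab_map_lookup_and_delete_batch() above rejects any non-zero attr->batch.flags and honours only BPF_F_LOCK among the per-element flags. A hedged sketch, not from the listing, assuming libbpf's bpf_map_lookup_and_delete_batch() wrapper and caller-sized __u32 key / __u64 value buffers:

/* Hedged sketch: driving the batch lookup-and-delete path from userspace. */
#include <linux/bpf.h>
#include <bpf/bpf.h>

static int demo_batch_drain(int map_fd, __u32 *keys, __u64 *values, __u32 max)
{
	LIBBPF_OPTS(bpf_map_batch_opts, opts,
		    .elem_flags = 0,	/* only BPF_F_LOCK is accepted here */
		    .flags	= 0);	/* non-zero batch.flags is rejected */
	__u32 out_batch = 0, count = max;
	int err;

	/* A NULL in_batch starts from the first bucket; out_batch receives the
	 * position to resume from on a follow-up call. */
	err = bpf_map_lookup_and_delete_batch(map_fd, NULL, &out_batch,
					      keys, values, &count, &opts);
	return err ? err : (int)count;	/* count now holds the elements returned */
}
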
2209 u64 map_flags) in bpf_percpu_hash_update() argument
2217 map_flags, true); in bpf_percpu_hash_update()
2219 ret = __htab_percpu_map_update_elem(map, key, value, map_flags, in bpf_percpu_hash_update()
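
bpf_percpu_hash_update() above is the syscall-side entry through which the per-CPU update helpers are reached with onallcpus set, so userspace supplies one value slot per possible CPU. A hedged sketch, not from the listing, assuming libbpf; libbpf_num_possible_cpus() sizes the buffer and the __u64 value type is an illustration:

/* Hedged sketch: updating a per-CPU hash map entry from userspace. */
#include <errno.h>
#include <stdlib.h>
#include <linux/bpf.h>
#include <bpf/bpf.h>
#include <bpf/libbpf.h>

static int demo_percpu_update(int map_fd, __u32 key)
{
	int ncpus = libbpf_num_possible_cpus();
	__u64 *values;
	int err;

	if (ncpus < 0)
		return ncpus;

	/* One 8-byte-aligned value slot per possible CPU, all zero-initialized. */
	values = calloc(ncpus, sizeof(__u64));
	if (!values)
		return -ENOMEM;

	/* BPF_ANY/BPF_NOEXIST/BPF_EXIST keep the same meaning as for a plain
	 * hash update; the kernel fans the slots out across CPUs. */
	err = bpf_map_update_elem(map_fd, &key, values, BPF_ANY);
	free(values);
	return err;
}
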
2345 void *key, void *value, u64 map_flags) in bpf_fd_htab_map_update_elem() argument
2355 ret = htab_map_update_elem(map, key, &ptr, map_flags); in bpf_fd_htab_map_update_elem()
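
bpf_fd_htab_map_update_elem() above is the fd-based variant used by hash-of-maps style maps: the value userspace passes is a file descriptor, which the kernel resolves to a pointer before reusing htab_map_update_elem(). A hedged sketch, not from the listing, assuming libbpf and an outer BPF_MAP_TYPE_HASH_OF_MAPS map already created with a compatible inner-map template:

/* Hedged sketch: updating a hash-of-maps entry with an inner map fd. */
#include <unistd.h>
#include <linux/bpf.h>
#include <bpf/bpf.h>

static int demo_hash_of_maps_update(int outer_fd, __u32 key)
{
	int inner_fd, err;

	/* Throwaway inner map for illustration; it must match the template the
	 * outer map was created against. */
	inner_fd = bpf_map_create(BPF_MAP_TYPE_ARRAY, "inner",
				  sizeof(__u32), sizeof(__u64), 4, NULL);
	if (inner_fd < 0)
		return inner_fd;

	/* The "value" written into the outer map is the inner map's fd; the
	 * map_flags argument keeps its usual BPF_ANY/BPF_NOEXIST/BPF_EXIST
	 * meaning on the way into htab_map_update_elem(). */
	err = bpf_map_update_elem(outer_fd, &key, &inner_fd, BPF_ANY);
	close(inner_fd);	/* the outer map holds its own reference */
	return err;
}
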