/* SPDX-License-Identifier: GPL-2.0-or-later */
#ifndef _PROTO_MEMORY_H
#define _PROTO_MEMORY_H

#include <net/sock.h>
#include <net/hotdata.h>

/* 1 MB per cpu, in page units */
#define SK_MEMORY_PCPU_RESERVE (1 << (20 - PAGE_SHIFT))
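/* e.g. with 4 KiB pages (PAGE_SHIFT == 12): 1 << (20 - 12) == 256 pages == 1 MB */

/* True if the socket's protocol tracks memory pressure at all. */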
static inline bool sk_has_memory_pressure(const struct sock *sk)
{
	return sk->sk_prot->memory_pressure != NULL;
}
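
/*
 * Protocol-wide (global) memory pressure state.  Protocols that do not
 * track memory pressure always report false.
 */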
static inline bool
proto_memory_pressure(const struct proto *prot)
{
	if (!prot->memory_pressure)
		return false;
	return !!READ_ONCE(*prot->memory_pressure);
}
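
/* Protocol-wide pressure only; memcg pressure is not considered here. */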
static inline bool sk_under_global_memory_pressure(const struct sock *sk)
{
	return proto_memory_pressure(sk->sk_prot);
}
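
/*
 * Per-socket view: under pressure if the socket's memcg is under socket
 * memory pressure, or if the protocol-wide flag is set.
 */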
static inline bool sk_under_memory_pressure(const struct sock *sk)
{
	if (!sk->sk_prot->memory_pressure)
		return false;

	if (mem_cgroup_sk_enabled(sk) &&
	    mem_cgroup_sk_under_memory_pressure(sk))
		return true;

	return !!READ_ONCE(*sk->sk_prot->memory_pressure);
}
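
/*
 * Pages charged to the protocol.  The per-cpu forward-alloc caches can leave
 * the shared counter transiently negative, hence the clamp to 0.
 */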
static inline long
proto_memory_allocated(const struct proto *prot)
{
	return max(0L, atomic_long_read(prot->memory_allocated));
}
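
/* Approximate protocol-wide usage, in pages, for this socket's protocol. */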
static inline long
sk_memory_allocated(const struct sock *sk)
{
	return proto_memory_allocated(sk->sk_prot);
}
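
/* Flush this cpu's cached delta into the shared memory_allocated counter. */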
static inline void proto_memory_pcpu_drain(struct proto *proto)
{
	int val = this_cpu_xchg(*proto->per_cpu_fw_alloc, 0);

	if (val)
		atomic_long_add(val, proto->memory_allocated);
}
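
/*
 * Charge @val pages to this cpu's cache; drain to the shared counter once the
 * cache reaches net_hotdata.sysctl_mem_pcpu_rsv (SK_MEMORY_PCPU_RESERVE pages
 * by default).
 */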
static inline void
sk_memory_allocated_add(const struct sock *sk, int val)
{
	struct proto *proto = sk->sk_prot;

	val = this_cpu_add_return(*proto->per_cpu_fw_alloc, val);

	if (unlikely(val >= READ_ONCE(net_hotdata.sysctl_mem_pcpu_rsv)))
		proto_memory_pcpu_drain(proto);
}
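
/*
 * Uncharge @val pages from this cpu's cache; drain once the cached deficit
 * reaches the per-cpu reserve limit.
 */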
static inline void
sk_memory_allocated_sub(const struct sock *sk, int val)
{
	struct proto *proto = sk->sk_prot;

	val = this_cpu_sub_return(*proto->per_cpu_fw_alloc, val);

	if (unlikely(val <= -READ_ONCE(net_hotdata.sysctl_mem_pcpu_rsv)))
		proto_memory_pcpu_drain(proto);
}
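
/*
 * Example: TCP wires these hooks to its global state roughly as follows
 * (see tcp_prot in net/ipv4/tcp_ipv4.c):
 *
 *	.memory_allocated	= &tcp_memory_allocated,
 *	.per_cpu_fw_alloc	= &tcp_memory_per_cpu_fw_alloc,
 *	.memory_pressure	= &tcp_memory_pressure,
 */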

#endif /* _PROTO_MEMORY_H */