/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (c) 2020, Oracle and/or its affiliates
 */

#ifndef SVC_RDMA_PCL_H
#define SVC_RDMA_PCL_H

#include <linux/list.h>
#include <linux/types.h>
#include <linux/sunrpc/xdr.h>

struct svc_rdma_segment {
	u32			rs_handle;
	u32			rs_length;
	u64			rs_offset;
};

struct svc_rdma_chunk {
	struct list_head	ch_list;

	u32			ch_position;
	u32			ch_length;
	u32			ch_payload_length;

	u32			ch_segcount;
	struct svc_rdma_segment	ch_segments[];
};

struct svc_rdma_pcl {
	unsigned int		cl_count;
	struct list_head	cl_chunks;
};
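
/*
 * Example (illustrative sketch): a struct svc_rdma_pcl anchors a list
 * of struct svc_rdma_chunk via cl_chunks/ch_list, and each chunk ends
 * in a flexible array of ch_segcount segments.  A new chunk is sized
 * for its segment array with struct_size() before being linked onto
 * the list; @segcount and @pcl below are assumed to be provided by
 * the caller.
 *
 *	struct svc_rdma_chunk *chunk;
 *
 *	chunk = kmalloc(struct_size(chunk, ch_segments, segcount),
 *			GFP_KERNEL);
 *	if (chunk) {
 *		chunk->ch_segcount = 0;
 *		list_add_tail(&chunk->ch_list, &pcl->cl_chunks);
 *		pcl->cl_count++;
 *	}
 */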

/**
 * pcl_init - Initialize a parsed chunk list
 * @pcl: parsed chunk list to initialize
 */
static inline void pcl_init(struct svc_rdma_pcl *pcl)
{
	INIT_LIST_HEAD(&pcl->cl_chunks);
}

/**
 * pcl_is_empty - Return true if parsed chunk list is empty
 * @pcl: parsed chunk list
 */
static inline bool pcl_is_empty(const struct svc_rdma_pcl *pcl)
{
	return list_empty(&pcl->cl_chunks);
}

/**
 * pcl_first_chunk - Return first chunk in a parsed chunk list
 * @pcl: parsed chunk list
 *
 * Returns the first chunk in the list, or NULL if the list is empty.
 */
static inline struct svc_rdma_chunk *
pcl_first_chunk(const struct svc_rdma_pcl *pcl)
{
	if (pcl_is_empty(pcl))
		return NULL;
	return list_first_entry(&pcl->cl_chunks, struct svc_rdma_chunk,
				ch_list);
}

/**
 * pcl_next_chunk - Return next chunk in a parsed chunk list
 * @pcl: a parsed chunk list
 * @chunk: chunk in @pcl
 *
 * Returns the next chunk in the list, or NULL if @chunk is already last.
 */
static inline struct svc_rdma_chunk *
pcl_next_chunk(const struct svc_rdma_pcl *pcl, struct svc_rdma_chunk *chunk)
{
	if (list_is_last(&chunk->ch_list, &pcl->cl_chunks))
		return NULL;
	return list_next_entry(chunk, ch_list);
}
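
/*
 * Example (illustrative): a chunk list can also be walked by hand with
 * pcl_first_chunk() and pcl_next_chunk() when the traversal may need
 * to stop early; want_chunk() is a hypothetical predicate.
 *
 *	struct svc_rdma_chunk *chunk;
 *
 *	for (chunk = pcl_first_chunk(pcl); chunk;
 *	     chunk = pcl_next_chunk(pcl, chunk)) {
 *		if (want_chunk(chunk))
 *			break;
 *	}
 */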

/**
 * pcl_for_each_chunk - Iterate over chunks in a parsed chunk list
 * @pos: the loop cursor
 * @pcl: a parsed chunk list
 */
#define pcl_for_each_chunk(pos, pcl) \
	for (pos = list_first_entry(&(pcl)->cl_chunks, struct svc_rdma_chunk, ch_list); \
	     &pos->ch_list != &(pcl)->cl_chunks; \
	     pos = list_next_entry(pos, ch_list))
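
/*
 * Example (illustrative): summing the payload bytes conveyed by every
 * chunk in a parsed chunk list; @pcl is assumed to point to a
 * populated struct svc_rdma_pcl.
 *
 *	struct svc_rdma_chunk *chunk;
 *	unsigned int total = 0;
 *
 *	pcl_for_each_chunk(chunk, pcl)
 *		total += chunk->ch_payload_length;
 */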

/**
 * pcl_for_each_segment - Iterate over segments in a parsed chunk
 * @pos: the loop cursor
 * @chunk: a parsed chunk
 */
#define pcl_for_each_segment(pos, chunk) \
	for (pos = &(chunk)->ch_segments[0]; \
	     pos <= &(chunk)->ch_segments[(chunk)->ch_segcount - 1]; \
	     pos++)
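
/*
 * Example (illustrative): visiting every segment of every chunk, here
 * to total the lengths of all registered segments.
 *
 *	struct svc_rdma_chunk *chunk;
 *	struct svc_rdma_segment *segment;
 *	u32 bytes = 0;
 *
 *	pcl_for_each_chunk(chunk, pcl) {
 *		pcl_for_each_segment(segment, chunk)
 *			bytes += segment->rs_length;
 *	}
 */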

/**
 * pcl_chunk_end_offset - Return offset of byte range following @chunk
 * @chunk: chunk in a parsed chunk list
 *
 * Returns the starting offset of the region just after @chunk.
 */
static inline unsigned int
pcl_chunk_end_offset(const struct svc_rdma_chunk *chunk)
{
	return xdr_align_size(chunk->ch_position + chunk->ch_payload_length);
}
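
/*
 * Worked example (illustrative): xdr_align_size() rounds up to the
 * next XDR quad (4-byte) boundary, so for a chunk with ch_position 0
 * and ch_payload_length 5:
 *
 *	pcl_chunk_end_offset(chunk) == xdr_align_size(0 + 5) == 8
 */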

struct svc_rdma_recv_ctxt;

extern void pcl_free(struct svc_rdma_pcl *pcl);
extern bool pcl_alloc_call(struct svc_rdma_recv_ctxt *rctxt, __be32 *p);
extern bool pcl_alloc_read(struct svc_rdma_recv_ctxt *rctxt, __be32 *p);
extern bool pcl_alloc_write(struct svc_rdma_recv_ctxt *rctxt,
			    struct svc_rdma_pcl *pcl, __be32 *p);
extern int pcl_process_nonpayloads(const struct svc_rdma_pcl *pcl,
				   const struct xdr_buf *xdr,
				   int (*actor)(const struct xdr_buf *,
						void *),
				   void *data);
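
/*
 * Example (illustrative sketch, assuming pcl_process_nonpayloads()
 * invokes @actor on each region of @xdr that is not covered by a
 * chunk payload): count_nonpayload() is a hypothetical actor that
 * accumulates the length of those regions via @data.
 *
 *	static int count_nonpayload(const struct xdr_buf *subbuf, void *data)
 *	{
 *		unsigned int *total = data;
 *
 *		*total += subbuf->len;
 *		return 0;
 *	}
 *
 *	...
 *		unsigned int total = 0;
 *		int ret;
 *
 *		ret = pcl_process_nonpayloads(pcl, xdr, count_nonpayload,
 *					      &total);
 */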

#endif	/* SVC_RDMA_PCL_H */