xref: /linux/drivers/infiniband/hw/irdma/pble.h (revision bdd1a21b52557ea8f61d0a5dc2f77151b576eb70)
/* SPDX-License-Identifier: GPL-2.0 or Linux-OpenIB */
/* Copyright (c) 2015 - 2019 Intel Corporation */
#ifndef IRDMA_PBLE_H
#define IRDMA_PBLE_H

/* PBLE = Physical Buffer List Entry (8-byte physical address entry). */
#define PBLE_SHIFT		6	/* log2(64); presumably min PBLE chunk granularity — confirm in pble.c */
#define PBLE_PER_PAGE		512	/* 4K page / 8-byte PBLE = 512 entries per page */
#define HMC_PAGED_BP_SHIFT	12	/* log2(4096): 4K HMC paged backing-page size */
#define PBLE_512_SHIFT		9	/* log2(512) */
#define PBLE_INVALID_IDX	0xffffffff	/* all-ones u32 sentinel: "no valid PBLE index" */
11 
/* Depth of a PBLE allocation: level 0 (none/inline), a single flat level-1
 * list, or a two-level tree (root pointing to leaf lists) — see
 * irdma_pble_alloc below.
 */
enum irdma_pble_level {
	PBLE_LEVEL_0 = 0,
	PBLE_LEVEL_1 = 1,
	PBLE_LEVEL_2 = 2,
};
17 
/* How a chunk's backing memory was obtained from the HMC segment descriptor.
 * NOTE(review): "CONTIGOUS" is a long-standing typo for "CONTIGUOUS"; the
 * identifier is kept as-is because external code references it.
 */
enum irdma_alloc_type {
	PBLE_NO_ALLOC	  = 0,	/* no backing allocated */
	PBLE_SD_CONTIGOUS = 1,	/* one physically contiguous SD allocation */
	PBLE_SD_PAGED	  = 2,	/* SD backed by individual pages */
};
23 
struct irdma_chunk;

/* Records which chunk an allocation came from and the span of bitmap bits
 * it occupies, so irdma_prm_return_pbles() can release exactly that span.
 */
struct irdma_pble_chunkinfo {
	struct irdma_chunk *pchunk;	/* owning chunk */
	u64 bit_idx;			/* first bit used in the chunk's bitmap */
	u64 bits_used;			/* number of consecutive bits consumed */
};
31 
/* One contiguous run of PBLEs handed out by the resource manager. */
struct irdma_pble_info {
	u64 *addr;	/* kernel virtual address of the first PBLE */
	u32 idx;	/* presumably FPM-relative PBLE index — confirm in pble.c */
	u32 cnt;	/* number of PBLEs in this run */
	struct irdma_pble_chunkinfo chunkinfo;	/* where to return the run */
};
38 
/* Two-level PBLE layout: a root list whose entries point at leaf lists. */
struct irdma_pble_level2 {
	struct irdma_pble_info root;		/* root (first-level) list */
	struct irdma_pble_info *leaf;		/* array of leaf descriptors */
	struct irdma_virt_mem leafmem;		/* backing allocation for @leaf */
	u32 leaf_cnt;				/* entries in @leaf */
};
45 
/* Result of irdma_get_pble(): either a flat level-1 run or a level-2 tree,
 * discriminated by @level (anonymous union shares the storage).
 */
struct irdma_pble_alloc {
	u32 total_cnt;			/* total PBLEs allocated */
	enum irdma_pble_level level;	/* selects which union member is valid */
	union {
		struct irdma_pble_info level1;
		struct irdma_pble_level2 level2;
	};
};
54 
/* HMC addressing tuple: segment descriptor and page descriptor indices. */
struct sd_pd_idx {
	u32 sd_idx;	/* segment descriptor index */
	u32 pd_idx;	/* absolute page descriptor index */
	u32 rel_pd_idx;	/* PD index relative to its SD — TODO confirm in pble.c */
};
60 
/* Bundle of context needed while adding backing pages to a chunk. */
struct irdma_add_page_info {
	struct irdma_chunk *chunk;		/* chunk receiving the pages */
	struct irdma_hmc_sd_entry *sd_entry;	/* SD entry being populated */
	struct irdma_hmc_info *hmc_info;	/* owning HMC function info */
	struct sd_pd_idx idx;			/* SD/PD position of the pages */
	u32 pages;				/* number of pages to add */
};
68 
/* One slab of PBLE-backing memory managed by the PRM. Sub-allocations are
 * tracked by a bitmap; the chunk lives on irdma_pble_prm.clist.
 */
struct irdma_chunk {
	struct list_head list;		/* node on irdma_pble_prm.clist */
	struct irdma_dma_info dmainfo;	/* DMA mapping info for the chunk */
	void *bitmapbuf;		/* allocation bitmap storage */

	u32 sizeofbitmap;		/* bitmap length in bits */
	u64 size;			/* chunk size in bytes */
	void *vaddr;			/* kernel virtual address of the chunk */
	u64 fpm_addr;			/* chunk's address in HMC function private memory */
	u32 pg_cnt;			/* number of backing pages */
	enum irdma_alloc_type type;	/* how the backing was allocated */
	struct irdma_sc_dev *dev;	/* owning device */
	struct irdma_virt_mem bitmapmem; /* backing allocation for @bitmapbuf */
	struct irdma_virt_mem chunkmem;	/* backing allocation for this struct */
};
84 
/* PBLE Resource Manager: the chunk list plus global accounting. */
struct irdma_pble_prm {
	struct list_head clist;		/* list of irdma_chunk */
	spinlock_t prm_lock; /* protect prm bitmap */
	u64 total_pble_alloc;		/* total PBLEs under management */
	u64 free_pble_cnt;		/* PBLEs currently free */
	u8 pble_shift;			/* log2 of allocation granularity */
};
92 
/* Top-level PBLE resource for one HMC function: embeds the PRM, tracks the
 * FPM address window being carved up, and accumulates usage statistics.
 */
struct irdma_hmc_pble_rsrc {
	u32 unallocated_pble;		/* PBLEs not yet added to the PRM */
	struct mutex pble_mutex_lock; /* protect PBLE resource */
	struct irdma_sc_dev *dev;	/* owning device */
	u64 fpm_base_addr;		/* start of the PBLE FPM region */
	u64 next_fpm_addr;		/* next unassigned FPM address */
	struct irdma_pble_prm pinfo;	/* embedded resource manager */
	u64 allocdpbles;		/* lifetime PBLEs handed out */
	u64 freedpbles;			/* lifetime PBLEs returned */
	u32 stats_direct_sds;		/* SDs allocated contiguously */
	u32 stats_paged_sds;		/* SDs allocated page-by-page */
	u64 stats_alloc_ok;		/* successful irdma_get_pble() calls */
	u64 stats_alloc_fail;		/* failed irdma_get_pble() calls */
	u64 stats_alloc_freed;		/* irdma_free_pble() calls */
	u64 stats_lvl1;			/* level-1 allocations served */
	u64 stats_lvl2;			/* level-2 allocations served */
};
110 
/* Tear down the PRM and free all chunks owned by @pble_rsrc. */
void irdma_destroy_pble_prm(struct irdma_hmc_pble_rsrc *pble_rsrc);
/* Initialize @pble_rsrc for @dev; returns 0 or an irdma status code. */
enum irdma_status_code
irdma_hmc_init_pble(struct irdma_sc_dev *dev,
		    struct irdma_hmc_pble_rsrc *pble_rsrc);
/* Return a previous irdma_get_pble() allocation to the pool. */
void irdma_free_pble(struct irdma_hmc_pble_rsrc *pble_rsrc,
		     struct irdma_pble_alloc *palloc);
/* Allocate @pble_cnt PBLEs into @palloc; @level1_only forbids a 2-level tree. */
enum irdma_status_code irdma_get_pble(struct irdma_hmc_pble_rsrc *pble_rsrc,
				      struct irdma_pble_alloc *palloc,
				      u32 pble_cnt, bool level1_only);
/* Add @pchunk to the PRM's chunk list (sets up its bitmap accounting). */
enum irdma_status_code irdma_prm_add_pble_mem(struct irdma_pble_prm *pprm,
					      struct irdma_chunk *pchunk);
/* Carve @mem_size bytes from the PRM; outputs kernel VA and FPM address,
 * and fills @chunkinfo so the span can later be returned.
 */
enum irdma_status_code
irdma_prm_get_pbles(struct irdma_pble_prm *pprm,
		    struct irdma_pble_chunkinfo *chunkinfo, u64 mem_size,
		    u64 **vaddr, u64 *fpm_addr);
/* Release the span described by @chunkinfo back to its chunk. */
void irdma_prm_return_pbles(struct irdma_pble_prm *pprm,
			    struct irdma_pble_chunkinfo *chunkinfo);
/* Acquire/release the PRM spinlock; @flags holds the saved IRQ state. */
void irdma_pble_acquire_lock(struct irdma_hmc_pble_rsrc *pble_rsrc,
			     unsigned long *flags);
void irdma_pble_release_lock(struct irdma_hmc_pble_rsrc *pble_rsrc,
			     unsigned long *flags);
/* Free the paged backing memory previously attached to @chunk. */
void irdma_pble_free_paged_mem(struct irdma_chunk *chunk);
/* Allocate @pg_cnt backing pages for @chunk (paged SD path). */
enum irdma_status_code irdma_pble_get_paged_mem(struct irdma_chunk *chunk,
						u32 pg_cnt);
/* Free @chunk's bitmap backing memory. */
void irdma_prm_rem_bitmapmem(struct irdma_hw *hw, struct irdma_chunk *chunk);
#endif /* IRDMA_PBLE_H */
137