xref: /linux/drivers/i3c/master/mipi-i3c-hci/hci.h (revision 9a48d4a130871bea7a7ae2d83cda0326b1922d3e)
1 /* SPDX-License-Identifier: BSD-3-Clause */
2 /*
3  * Copyright (c) 2020, MIPI Alliance, Inc.
4  *
5  * Author: Nicolas Pitre <npitre@baylibre.com>
6  *
7  * Common HCI stuff
8  */
9 
10 #ifndef HCI_H
11 #define HCI_H
12 
13 #include <linux/io.h>
14 
/*
 * 32-bit word aware bit and mask macros.
 *
 * The HCI spec numbers register fields with absolute bit positions across a
 * multi-word layout (0..127). These macros translate such a spec bit number
 * into a mask/bit within the 32-bit word (w0..w3) that actually holds it,
 * by subtracting the word's base bit offset.
 */
#define W0_MASK(h, l)  GENMASK((h) - 0,  (l) - 0)
#define W1_MASK(h, l)  GENMASK((h) - 32, (l) - 32)
#define W2_MASK(h, l)  GENMASK((h) - 64, (l) - 64)
#define W3_MASK(h, l)  GENMASK((h) - 96, (l) - 96)

/* Same for single bit macros (trailing _ to align with W*_MASK width) */
#define W0_BIT_(x)  BIT((x) - 0)
#define W1_BIT_(x)  BIT((x) - 32)
#define W2_BIT_(x)  BIT((x) - 64)
#define W3_BIT_(x)  BIT((x) - 96)
26 
/*
 * MMIO register accessor shorthands for the standard HCI register block.
 *
 * NOTE: these are deliberately unhygienic — they expand to a reference to a
 * local variable named "hci" (struct i3c_hci *) that must be in scope at the
 * call site. reg_set()/reg_clear() perform a non-atomic read-modify-write;
 * callers are responsible for any locking needed.
 */
#define reg_read(r)		readl(hci->base_regs + (r))
#define reg_write(r, v)		writel(v, hci->base_regs + (r))
#define reg_set(r, v)		reg_write(r, reg_read(r) | (v))
#define reg_clear(r, v)		reg_write(r, reg_read(r) & ~(v))
31 
/* Command-building operations; defined elsewhere, only referenced by pointer here */
struct hci_cmd_ops;

/*
 * One Device Address Table (DAT) entry as seen by software:
 * two consecutive 32-bit words.
 */
struct dat_words {
	u32 w0;
	u32 w1;
};
38 
/* Our main structure */
struct i3c_hci {
	/* Generic I3C master controller; first member of the struct */
	struct i3c_master_controller master;

	/* Mapped register regions */
	void __iomem *base_regs;	/* standard HCI registers (used by reg_read/reg_write) */
	void __iomem *DAT_regs;		/* Device Address Table */
	void __iomem *DCT_regs;		/* Device Characteristic Table */
	void __iomem *RHS_regs;		/* ring headers (DMA mode) — presumably; see mipi_i3c_hci_dma */
	void __iomem *PIO_regs;		/* PIO mode registers */
	void __iomem *EXTCAPS_regs;	/* extended capabilities */
	void __iomem *AUTOCMD_regs;	/* auto-command registers */
	void __iomem *DEBUG_regs;	/* debug registers */

	const struct hci_io_ops *io;	/* active I/O backend (PIO or DMA) */
	void *io_data;			/* private data owned by the io backend */
	const struct hci_cmd_ops *cmd;	/* command descriptor building ops */
	spinlock_t lock;		/* NOTE(review): protected data not visible here — see .c files */
	struct mutex control_mutex;	/* NOTE(review): likewise, scope defined by users */
	atomic_t next_cmd_tid;		/* source for per-xfer transaction IDs (see hci_xfer.cmd_tid) */
	bool irq_inactive;		/* used with i3c_hci_sync_irq_inactive() — confirm semantics there */
	u32 caps;			/* controller capability bits */
	unsigned int quirks;		/* bitmask of HCI_QUIRK_* flags */

	/* Device Address Table bookkeeping */
	unsigned int DAT_entries;	/* number of DAT entries */
	unsigned int DAT_entry_size;	/* size of one DAT entry, in bytes */
	void *DAT_data;			/* software shadow/private data for the DAT */
	struct dat_words *DAT;		/* DAT entries viewed as word pairs */

	/* Device Characteristic Table bookkeeping */
	unsigned int DCT_entries;
	unsigned int DCT_entry_size;

	/* HCI version as reported by the controller */
	u8 version_major;
	u8 version_minor;
	u8 revision;
	u8 dyn_addr;			/* presumably the controller's own dynamic address — confirm */

	/* vendor identification and vendor-specific state */
	u32 vendor_mipi_id;
	u32 vendor_version_id;
	u32 vendor_product_id;
	void *vendor_data;
};
74 
/*
 * Structure to represent a master initiated transfer.
 * The rnw, data and data_len fields must be initialized before calling any
 * hci->cmd->*() method. The cmd method will initialize cmd_desc[] and
 * possibly modify (clear) the data field. Then xfer->cmd_desc[0] can
 * be augmented with CMD_0_ROC and/or CMD_0_TOC.
 * The completion field needs to be initialized before queueing with
 * hci->io->queue_xfer(), and requires CMD_0_ROC to be set.
 */
struct hci_xfer {
	u32 cmd_desc[4];		/* command descriptor, up to 4 DWORDs */
	u32 response;			/* response descriptor DWORD, filled on completion */
	bool rnw;			/* true for a read transfer, false for a write */
	void *data;			/* payload buffer (may be cleared by cmd ops, see above) */
	unsigned int data_len;		/* payload length in bytes */
	unsigned int cmd_tid;		/* transaction ID for this command */
	struct completion *completion;	/* signaled on completion; requires CMD_0_ROC */
	unsigned long timeout;		/* transfer timeout — presumably in jiffies; confirm at users */
	union {
		struct {
			/* PIO specific */
			struct hci_xfer *next_xfer;	/* next in the xfer queue */
			struct hci_xfer *next_data;	/* next in the data FIFO servicing queue */
			struct hci_xfer *next_resp;	/* next awaiting a response */
			unsigned int data_left;		/* bytes still to move for this xfer */
			/* NOTE(review): presumably staging for a trailing partial
			 * word of the payload — confirm in the PIO code */
			u32 data_word_before_partial;
		};
		struct {
			/* DMA specific */
			struct i3c_dma *dma;	/* owning DMA context */
			int ring_number;	/* ring this xfer was queued on */
			int ring_entry;		/* slot index within that ring */
		};
	};
};
110 
/*
 * Allocate an array of @n zero-initialized struct hci_xfer as a single
 * allocation. Presumably returns NULL on allocation failure (kzalloc
 * semantics — confirm against the kzalloc_objs() definition).
 * Release with hci_free_xfer().
 */
static inline struct hci_xfer *hci_alloc_xfer(unsigned int n)
{
	return kzalloc_objs(struct hci_xfer, n);
}
115 
/*
 * Free an xfer array obtained from hci_alloc_xfer().
 * @n is intentionally unused: the whole array is one allocation, so a
 * single kfree() releases all n entries; the parameter only mirrors the
 * allocator's signature. kfree(NULL) is a no-op, so a NULL @xfer is safe.
 */
static inline void hci_free_xfer(struct hci_xfer *xfer, unsigned int n)
{
	kfree(xfer);
}
120 
/*
 * This abstracts PIO vs DMA operations.
 * One of the two backends below (mipi_i3c_hci_pio / mipi_i3c_hci_dma) is
 * selected at probe time and installed as hci->io.
 */
struct hci_io_ops {
	/* service a controller interrupt; presumably returns true if it was ours — confirm */
	bool (*irq_handler)(struct i3c_hci *hci);
	/* queue @n xfers for execution */
	int (*queue_xfer)(struct i3c_hci *hci, struct hci_xfer *xfer, int n);
	/* try to cancel queued xfers; presumably returns true on success — confirm */
	bool (*dequeue_xfer)(struct i3c_hci *hci, struct hci_xfer *xfer, int n);
	/* error recovery for the given xfers */
	int (*handle_error)(struct i3c_hci *hci, struct hci_xfer *xfer, int n);
	/* In-Band Interrupt (IBI) support */
	int (*request_ibi)(struct i3c_hci *hci, struct i3c_dev_desc *dev,
			   const struct i3c_ibi_setup *req);
	void (*free_ibi)(struct i3c_hci *hci, struct i3c_dev_desc *dev);
	void (*recycle_ibi_slot)(struct i3c_hci *hci, struct i3c_dev_desc *dev,
				struct i3c_ibi_slot *slot);
	/* backend lifecycle */
	int (*init)(struct i3c_hci *hci);
	void (*cleanup)(struct i3c_hci *hci);
	void (*suspend)(struct i3c_hci *hci);
	void (*resume)(struct i3c_hci *hci);
};

/* the two available I/O backends */
extern const struct hci_io_ops mipi_i3c_hci_pio;
extern const struct hci_io_ops mipi_i3c_hci_dma;
140 
/* Our per device master private data */
struct i3c_hci_dev_data {
	int dat_idx;	/* this device's index into the Device Address Table */
	void *ibi_data;	/* IBI state owned by the active io backend */
};
146 
/* list of quirks (BIT(0) currently unassigned) */
#define HCI_QUIRK_RAW_CCC	BIT(1)	/* CCC framing must be explicit */
#define HCI_QUIRK_PIO_MODE	BIT(2)  /* Set PIO mode for AMD platforms */
#define HCI_QUIRK_OD_PP_TIMING		BIT(3)  /* Set OD and PP timings for AMD platforms */
#define HCI_QUIRK_RESP_BUF_THLD		BIT(4)  /* Set resp buf thld to 0 for AMD platforms */
#define HCI_QUIRK_RPM_ALLOWED		BIT(5)  /* Runtime PM allowed */
153 
/* global functions — implementations live in the sibling .c files */
void mipi_i3c_hci_resume(struct i3c_hci *hci);
void mipi_i3c_hci_pio_reset(struct i3c_hci *hci);
void mipi_i3c_hci_dct_index_reset(struct i3c_hci *hci);
/* AMD platform workarounds (see HCI_QUIRK_OD_PP_TIMING / HCI_QUIRK_RESP_BUF_THLD) */
void amd_set_od_pp_timing(struct i3c_hci *hci);
void amd_set_resp_buf_thld(struct i3c_hci *hci);
void i3c_hci_sync_irq_inactive(struct i3c_hci *hci);
int i3c_hci_process_xfer(struct i3c_hci *hci, struct hci_xfer *xfer, int n);
162 
163 #endif
164