Lines matching full:qce
40 static void qce_unregister_algs(struct qce_device *qce) in qce_unregister_algs() argument
47 ops->unregister_algs(qce); in qce_unregister_algs()
51 static int qce_register_algs(struct qce_device *qce) in qce_register_algs() argument
58 ret = ops->register_algs(qce); in qce_register_algs()
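The two matches above only show the per-class hook invocations inside qce_register_algs() and qce_unregister_algs(). As a rough guide, the ops-table dispatch pattern they imply is sketched below; the shape of struct qce_algo_ops and the contents of the table are assumptions reconstructed from the visible calls, not taken from the search output (the driver's real structure carries additional fields).

/* Hedged sketch: per-algorithm-class ops walked by the core.  Only the two
 * hooks visible in the matches are modelled here. */
struct qce_algo_ops {
        int (*register_algs)(struct qce_device *qce);
        void (*unregister_algs)(struct qce_device *qce);
};

static const struct qce_algo_ops *qce_ops[] = {
        /* per-class ops (e.g. skcipher, ahash, aead) would be listed here */
};

static void qce_unregister_algs(struct qce_device *qce)
{
        int i;

        for (i = 0; i < ARRAY_SIZE(qce_ops); i++)
                qce_ops[i]->unregister_algs(qce);
}

static int qce_register_algs(struct qce_device *qce)
{
        int i, ret = -ENODEV;

        for (i = 0; i < ARRAY_SIZE(qce_ops); i++) {
                ret = qce_ops[i]->register_algs(qce);
                if (ret)
                        break;
        }

        return ret;
}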
83 static int qce_handle_queue(struct qce_device *qce, in qce_handle_queue() argument
90 spin_lock_irqsave(&qce->lock, flags); in qce_handle_queue()
93 ret = crypto_enqueue_request(&qce->queue, req); in qce_handle_queue()
96 if (qce->req) { in qce_handle_queue()
97 spin_unlock_irqrestore(&qce->lock, flags); in qce_handle_queue()
101 backlog = crypto_get_backlog(&qce->queue); in qce_handle_queue()
102 async_req = crypto_dequeue_request(&qce->queue); in qce_handle_queue()
104 qce->req = async_req; in qce_handle_queue()
106 spin_unlock_irqrestore(&qce->lock, flags); in qce_handle_queue()
112 spin_lock_bh(&qce->lock); in qce_handle_queue()
114 spin_unlock_bh(&qce->lock); in qce_handle_queue()
119 qce->result = err; in qce_handle_queue()
120 tasklet_schedule(&qce->done_tasklet); in qce_handle_queue()
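The matches above skip every line of qce_handle_queue() that does not contain "qce", so the control flow around them is lost. Below is a hedged reconstruction of the standard crypto_queue handling pattern those lines belong to (crypto_enqueue_request/crypto_dequeue_request/crypto_get_backlog from <crypto/algapi.h>); qce_dispatch_request() is a hypothetical stand-in for the dispatch step the search output does not show, and the early-exit checks are assumptions.

static int qce_handle_queue(struct qce_device *qce,
                            struct crypto_async_request *req)
{
        struct crypto_async_request *async_req, *backlog;
        unsigned long flags;
        int ret = 0, err;

        spin_lock_irqsave(&qce->lock, flags);

        /* New requests are queued first; dispatch happens below. */
        if (req)
                ret = crypto_enqueue_request(&qce->queue, req);

        /* An in-flight request means the engine is busy; leave req queued. */
        if (qce->req) {
                spin_unlock_irqrestore(&qce->lock, flags);
                return ret;
        }

        backlog = crypto_get_backlog(&qce->queue);
        async_req = crypto_dequeue_request(&qce->queue);
        if (async_req)
                qce->req = async_req;

        spin_unlock_irqrestore(&qce->lock, flags);

        if (!async_req)
                return ret;

        /* Tell a backlogged caller its request has now been accepted. */
        if (backlog) {
                spin_lock_bh(&qce->lock);
                crypto_request_complete(backlog, -EINPROGRESS);
                spin_unlock_bh(&qce->lock);
        }

        err = qce_dispatch_request(async_req);  /* hypothetical dispatch helper */
        if (err) {
                qce->result = err;
                tasklet_schedule(&qce->done_tasklet);
        }

        return ret;
}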
128 struct qce_device *qce = (struct qce_device *)data; in qce_tasklet_req_done() local
132 spin_lock_irqsave(&qce->lock, flags); in qce_tasklet_req_done()
133 req = qce->req; in qce_tasklet_req_done()
134 qce->req = NULL; in qce_tasklet_req_done()
135 spin_unlock_irqrestore(&qce->lock, flags); in qce_tasklet_req_done()
138 crypto_request_complete(req, qce->result); in qce_tasklet_req_done()
140 qce_handle_queue(qce, NULL); in qce_tasklet_req_done()
143 static int qce_async_request_enqueue(struct qce_device *qce, in qce_async_request_enqueue() argument
146 return qce_handle_queue(qce, req); in qce_async_request_enqueue()
149 static void qce_async_request_done(struct qce_device *qce, int ret) in qce_async_request_done() argument
151 qce->result = ret; in qce_async_request_done()
152 tasklet_schedule(&qce->done_tasklet); in qce_async_request_done()
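qce->async_req_enqueue and qce->async_req_done are the two hooks through which the per-algorithm files talk to this core. A hedged usage sketch follows; qce_from_request() is a hypothetical lookup helper (the real driver reaches the qce_device through its per-algorithm template), and the encrypt entry point is only illustrative.

/* Hand a request to the core; qce_handle_queue() dispatches it once the
 * engine is idle and completes it via the done tasklet. */
static int my_skcipher_encrypt(struct skcipher_request *req)
{
        struct qce_device *qce = qce_from_request(req);   /* hypothetical */

        return qce->async_req_enqueue(qce, &req->base);
}

/* From the DMA/engine completion path, push the result back to the core,
 * which stores it in qce->result and schedules done_tasklet. */
static void my_engine_done(struct qce_device *qce, int error)
{
        qce->async_req_done(qce, error);
}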
155 static int qce_check_version(struct qce_device *qce) in qce_check_version() argument
159 qce_get_version(qce, &major, &minor, &step); in qce_check_version()
168 qce->burst_size = QCE_BAM_BURST_SIZE; in qce_check_version()
183 qce->pipe_pair_id = qce->dma.rxchan->chan_id >> 1; in qce_check_version()
185 dev_dbg(qce->dev, "Crypto device found, version %d.%d.%d\n", in qce_check_version()
194 struct qce_device *qce; in qce_crypto_probe() local
197 qce = devm_kzalloc(dev, sizeof(*qce), GFP_KERNEL); in qce_crypto_probe()
198 if (!qce) in qce_crypto_probe()
201 qce->dev = dev; in qce_crypto_probe()
202 platform_set_drvdata(pdev, qce); in qce_crypto_probe()
204 qce->base = devm_platform_ioremap_resource(pdev, 0); in qce_crypto_probe()
205 if (IS_ERR(qce->base)) in qce_crypto_probe()
206 return PTR_ERR(qce->base); in qce_crypto_probe()
212 qce->core = devm_clk_get_optional(qce->dev, "core"); in qce_crypto_probe()
213 if (IS_ERR(qce->core)) in qce_crypto_probe()
214 return PTR_ERR(qce->core); in qce_crypto_probe()
216 qce->iface = devm_clk_get_optional(qce->dev, "iface"); in qce_crypto_probe()
217 if (IS_ERR(qce->iface)) in qce_crypto_probe()
218 return PTR_ERR(qce->iface); in qce_crypto_probe()
220 qce->bus = devm_clk_get_optional(qce->dev, "bus"); in qce_crypto_probe()
221 if (IS_ERR(qce->bus)) in qce_crypto_probe()
222 return PTR_ERR(qce->bus); in qce_crypto_probe()
224 qce->mem_path = devm_of_icc_get(qce->dev, "memory"); in qce_crypto_probe()
225 if (IS_ERR(qce->mem_path)) in qce_crypto_probe()
226 return PTR_ERR(qce->mem_path); in qce_crypto_probe()
228 ret = icc_set_bw(qce->mem_path, QCE_DEFAULT_MEM_BANDWIDTH, QCE_DEFAULT_MEM_BANDWIDTH); in qce_crypto_probe()
232 ret = clk_prepare_enable(qce->core); in qce_crypto_probe()
236 ret = clk_prepare_enable(qce->iface); in qce_crypto_probe()
240 ret = clk_prepare_enable(qce->bus); in qce_crypto_probe()
244 ret = qce_dma_request(qce->dev, &qce->dma); in qce_crypto_probe()
248 ret = qce_check_version(qce); in qce_crypto_probe()
252 spin_lock_init(&qce->lock); in qce_crypto_probe()
253 tasklet_init(&qce->done_tasklet, qce_tasklet_req_done, in qce_crypto_probe()
254 (unsigned long)qce); in qce_crypto_probe()
255 crypto_init_queue(&qce->queue, QCE_QUEUE_LENGTH); in qce_crypto_probe()
257 qce->async_req_enqueue = qce_async_request_enqueue; in qce_crypto_probe()
258 qce->async_req_done = qce_async_request_done; in qce_crypto_probe()
260 ret = qce_register_algs(qce); in qce_crypto_probe()
267 qce_dma_release(&qce->dma); in qce_crypto_probe()
269 clk_disable_unprepare(qce->bus); in qce_crypto_probe()
271 clk_disable_unprepare(qce->iface); in qce_crypto_probe()
273 clk_disable_unprepare(qce->core); in qce_crypto_probe()
275 icc_set_bw(qce->mem_path, 0, 0); in qce_crypto_probe()
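The probe matches show the acquisition order (interconnect bandwidth vote, core/iface/bus clocks, DMA channels, version check, queue and tasklet setup, algorithm registration) and, at the end, the teardown calls in reverse order. A hedged sketch of the goto-based unwinding that ties them together follows; the label names and the if (ret) branches are assumptions, since the search output only contains lines matching "qce". The devm_*-managed resources (the kzalloc'd context, MMIO mapping, clock and interconnect handles) need no labels because the driver core releases them automatically.

static int qce_crypto_probe(struct platform_device *pdev)
{
        struct device *dev = &pdev->dev;
        struct qce_device *qce;
        int ret;

        qce = devm_kzalloc(dev, sizeof(*qce), GFP_KERNEL);
        if (!qce)
                return -ENOMEM;

        qce->dev = dev;
        platform_set_drvdata(pdev, qce);

        /* devm-managed acquisitions of qce->base, the three optional clocks
         * and qce->mem_path proceed as in the matched lines above; each one
         * simply returns its error, with no manual cleanup required. */

        ret = icc_set_bw(qce->mem_path, QCE_DEFAULT_MEM_BANDWIDTH,
                         QCE_DEFAULT_MEM_BANDWIDTH);
        if (ret)
                return ret;

        ret = clk_prepare_enable(qce->core);
        if (ret)
                goto err_mem_path_disable;

        ret = clk_prepare_enable(qce->iface);
        if (ret)
                goto err_clks_core;

        ret = clk_prepare_enable(qce->bus);
        if (ret)
                goto err_clks_iface;

        ret = qce_dma_request(qce->dev, &qce->dma);
        if (ret)
                goto err_clks;

        ret = qce_check_version(qce);
        if (ret)
                goto err_dma;

        spin_lock_init(&qce->lock);
        tasklet_init(&qce->done_tasklet, qce_tasklet_req_done,
                     (unsigned long)qce);
        crypto_init_queue(&qce->queue, QCE_QUEUE_LENGTH);

        qce->async_req_enqueue = qce_async_request_enqueue;
        qce->async_req_done = qce_async_request_done;

        ret = qce_register_algs(qce);
        if (ret)
                goto err_dma;

        return 0;

err_dma:
        qce_dma_release(&qce->dma);
err_clks:
        clk_disable_unprepare(qce->bus);
err_clks_iface:
        clk_disable_unprepare(qce->iface);
err_clks_core:
        clk_disable_unprepare(qce->core);
err_mem_path_disable:
        icc_set_bw(qce->mem_path, 0, 0);
        return ret;
}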
282 struct qce_device *qce = platform_get_drvdata(pdev); in qce_crypto_remove() local
284 tasklet_kill(&qce->done_tasklet); in qce_crypto_remove()
285 qce_unregister_algs(qce); in qce_crypto_remove()
286 qce_dma_release(&qce->dma); in qce_crypto_remove()
287 clk_disable_unprepare(qce->bus); in qce_crypto_remove()
288 clk_disable_unprepare(qce->iface); in qce_crypto_remove()
289 clk_disable_unprepare(qce->core); in qce_crypto_remove()
295 { .compatible = "qcom,qce", },
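The final match is an entry of the driver's of_device_id table. A minimal sketch of how such a table is typically wired into a platform driver follows; only the "qcom,qce" compatible string comes from the match, while the driver-structure name, the .name string and the table terminator are illustrative assumptions.

static const struct of_device_id qce_crypto_of_match[] = {
        { .compatible = "qcom,qce", },
        {}
};
MODULE_DEVICE_TABLE(of, qce_crypto_of_match);

static struct platform_driver qce_crypto_driver = {
        .probe = qce_crypto_probe,
        .remove = qce_crypto_remove,
        .driver = {
                .name = "qcom_qce",             /* illustrative name */
                .of_match_table = qce_crypto_of_match,
        },
};
module_platform_driver(qce_crypto_driver);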