Lines matching defs:mhi_dev (MHI host core; the fragments appear to come from the Linux kernel's drivers/bus/mhi/host/main.c)
280 struct mhi_device *mhi_dev;
287 mhi_dev = to_mhi_device(dev);
288 mhi_cntrl = mhi_dev->mhi_cntrl;
291 if (mhi_dev->dev_type == MHI_DEVICE_CONTROLLER)
294 ul_chan = mhi_dev->ul_chan;
295 dl_chan = mhi_dev->dl_chan;
308 * references to mhi_dev created for ul and dl channels. We can
309 * be sure that there will be no instances of mhi_dev left after
316 put_device(&ul_chan->mhi_dev->dev);
323 put_device(&dl_chan->mhi_dev->dev);
326 dev_dbg(&mhi_cntrl->mhi_dev->dev, "destroy device for chan:%s\n",
327 mhi_dev->name);
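The fragments at 280-327 are mhi_destroy_device(): controller devices are skipped (291), and for a transfer device the per-channel references taken at creation time are dropped before the destroy is logged. An annotated sketch of the pairing (the release detail is an assumption based on the usual driver-core pattern):

    /* Creation side (mhi_create_devices, lines 396/411 below):
     *         get_device(&mhi_dev->dev);
     *         mhi_chan->mhi_dev = mhi_dev;
     * Destruction side (here, lines 316/323):
     *         put_device(&ul_chan->mhi_dev->dev);
     *         put_device(&dl_chan->mhi_dev->dev);
     * The final put_device() drops the last reference, freeing the
     * mhi_device through its device release callback. */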
336 int mhi_get_free_desc_count(struct mhi_device *mhi_dev,
339 struct mhi_controller *mhi_cntrl = mhi_dev->mhi_cntrl;
341 mhi_dev->ul_chan : mhi_dev->dl_chan;
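Lines 336-341 open mhi_get_free_desc_count(), which resolves the channel for the requested direction and reports how many free descriptors remain on its transfer ring. Clients can poll it for ad-hoc flow control; a minimal sketch, assuming an uplink channel (the helper name is hypothetical):

    #include <linux/dma-direction.h>
    #include <linux/mhi.h>

    /* True while the UL transfer ring still has room for another TRE. */
    static bool my_ul_has_room(struct mhi_device *mhi_dev)
    {
            return mhi_get_free_desc_count(mhi_dev, DMA_TO_DEVICE) > 0;
    }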
348 void mhi_notify(struct mhi_device *mhi_dev, enum mhi_callback cb_reason)
352 if (!mhi_dev->dev.driver)
355 mhi_drv = to_mhi_driver(mhi_dev->dev.driver);
358 mhi_drv->status_cb(mhi_dev, cb_reason);
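Lines 348-358 are mhi_notify(): if a client driver is bound, its status_cb is invoked with the callback reason. The client side is wired through struct mhi_driver; a hypothetical handler (name and body are illustrative):

    #include <linux/mhi.h>

    static void my_status_cb(struct mhi_device *mhi_dev,
                             enum mhi_callback cb_reason)
    {
            /* MHI_CB_PENDING_DATA is the reason raised at line 473 below. */
            if (cb_reason == MHI_CB_PENDING_DATA)
                    dev_dbg(&mhi_dev->dev, "downlink data pending\n");
    }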
366 struct mhi_device *mhi_dev;
367 struct device *dev = &mhi_cntrl->mhi_dev->dev;
372 if (!mhi_chan->configured || mhi_chan->mhi_dev ||
375 mhi_dev = mhi_alloc_device(mhi_cntrl);
376 if (IS_ERR(mhi_dev))
379 mhi_dev->dev_type = MHI_DEVICE_XFER;
382 mhi_dev->ul_chan = mhi_chan;
383 mhi_dev->ul_chan_id = mhi_chan->chan;
387 mhi_dev->dl_chan = mhi_chan;
388 mhi_dev->dl_chan_id = mhi_chan->chan;
392 put_device(&mhi_dev->dev);
396 get_device(&mhi_dev->dev);
397 mhi_chan->mhi_dev = mhi_dev;
405 mhi_dev->ul_chan = mhi_chan;
406 mhi_dev->ul_chan_id = mhi_chan->chan;
408 mhi_dev->dl_chan = mhi_chan;
409 mhi_dev->dl_chan_id = mhi_chan->chan;
411 get_device(&mhi_dev->dev);
412 mhi_chan->mhi_dev = mhi_dev;
417 mhi_dev->name = mhi_chan->name;
418 dev_set_name(&mhi_dev->dev, "%s_%s",
419 dev_name(&mhi_cntrl->mhi_dev->dev),
420 mhi_dev->name);
423 if (mhi_dev->dl_chan && mhi_dev->dl_chan->wake_capable)
424 device_init_wakeup(&mhi_dev->dev, true);
426 ret = device_add(&mhi_dev->dev);
428 put_device(&mhi_dev->dev);
447 dev_dbg(&mhi_cntrl->mhi_dev->dev,
456 dev_err(&mhi_cntrl->mhi_dev->dev,
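Lines 366-456 appear to be mhi_create_devices(): each configured channel gets an mhi_device (a ul/dl pair sharing one device when the names match, 405-412), named "<controller>_<channel>" (418-420), with wakeup enabled for wake-capable DL channels (423-424). The channel name copied at 417 is what client drivers match against; a hypothetical match table:

    #include <linux/mhi.h>
    #include <linux/mod_devicetable.h>
    #include <linux/module.h>

    static const struct mhi_device_id my_mhi_match_table[] = {
            { .chan = "IPCR" },     /* channel name chosen for illustration */
            {},
    };
    MODULE_DEVICE_TABLE(mhi, my_mhi_match_table);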
470 struct mhi_device *mhi_dev = mhi_chan->mhi_dev;
472 if (mhi_dev)
473 mhi_notify(mhi_dev, MHI_CB_PENDING_DATA);
484 struct device *dev = &mhi_cntrl->mhi_dev->dev;
573 struct device *dev = &mhi_cntrl->mhi_dev->dev;
611 dev_err(&mhi_cntrl->mhi_dev->dev,
630 dev_err(&mhi_cntrl->mhi_dev->dev,
659 mhi_chan->xfer_cb(mhi_chan->mhi_dev, &result);
673 if (mhi_queue_buf(mhi_chan->mhi_dev,
756 mhi_chan->xfer_cb(mhi_chan->mhi_dev, &result);
792 dev_err(&mhi_cntrl->mhi_dev->dev,
809 dev_err(&mhi_cntrl->mhi_dev->dev,
825 struct device *dev = &mhi_cntrl->mhi_dev->dev;
839 dev_err(&mhi_cntrl->mhi_dev->dev,
970 dev_err(&mhi_cntrl->mhi_dev->dev,
1006 dev_err(&mhi_cntrl->mhi_dev->dev,
1045 dev_err(&mhi_cntrl->mhi_dev->dev,
1078 struct device *dev = &mhi_cntrl->mhi_dev->dev;
1131 static int mhi_queue(struct mhi_device *mhi_dev, struct mhi_buf_info *buf_info,
1134 struct mhi_controller *mhi_cntrl = mhi_dev->mhi_cntrl;
1135 struct mhi_chan *mhi_chan = (dir == DMA_TO_DEVICE) ? mhi_dev->ul_chan :
1136 mhi_dev->dl_chan;
1177 int mhi_queue_skb(struct mhi_device *mhi_dev, enum dma_data_direction dir,
1180 struct mhi_chan *mhi_chan = (dir == DMA_TO_DEVICE) ? mhi_dev->ul_chan :
1181 mhi_dev->dl_chan;
1191 return mhi_queue(mhi_dev, &buf_info, dir, mflags);
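mhi_queue_skb() (1177-1191) wraps the skb in a struct mhi_buf_info and funnels it into the common mhi_queue() path shown at 1131-1136. A hedged TX sketch (helper name and error policy are assumptions; on success the core holds the skb until the ul_xfer_cb completion):

    #include <linux/mhi.h>
    #include <linux/skbuff.h>

    static int my_xmit(struct mhi_device *mhi_dev, struct sk_buff *skb)
    {
            int ret = mhi_queue_skb(mhi_dev, DMA_TO_DEVICE, skb, skb->len,
                                    MHI_EOT);

            if (ret)
                    dev_kfree_skb_any(skb); /* not queued, still ours */
            return ret;
    }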
1254 int mhi_queue_buf(struct mhi_device *mhi_dev, enum dma_data_direction dir,
1263 return mhi_queue(mhi_dev, &buf_info, dir, mflags);
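mhi_queue_buf() (1254-1263) is the raw-buffer variant of the same path; the buffer must stay valid until the transfer callback reports completion. A sketch posting one receive buffer (names assumed):

    #include <linux/mhi.h>

    static int my_post_rx(struct mhi_device *mhi_dev, void *buf, size_t len)
    {
            /* Completion (and buffer ownership back) arrives via dl_xfer_cb. */
            return mhi_queue_buf(mhi_dev, DMA_FROM_DEVICE, buf, len, MHI_EOT);
    }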
1267 bool mhi_queue_is_full(struct mhi_device *mhi_dev, enum dma_data_direction dir)
1269 struct mhi_controller *mhi_cntrl = mhi_dev->mhi_cntrl;
1271 mhi_dev->ul_chan : mhi_dev->dl_chan;
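mhi_queue_is_full() (1267-1271) answers the same question as mhi_get_free_desc_count() but as a boolean, which suits netdev-style flow control. A hypothetical use (ndev is an assumption):

    #include <linux/mhi.h>
    #include <linux/netdevice.h>

    static void my_tx_flow_control(struct mhi_device *mhi_dev,
                                   struct net_device *ndev)
    {
            /* Pause the TX queue while the UL ring is saturated. */
            if (mhi_queue_is_full(mhi_dev, DMA_TO_DEVICE))
                    netif_stop_queue(ndev);
    }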
1285 struct device *dev = &mhi_cntrl->mhi_dev->dev;
1335 struct device *dev = &mhi_chan->mhi_dev->dev;
1374 ret = mhi_device_get_sync(mhi_cntrl->mhi_dev);
1409 mhi_device_put(mhi_cntrl->mhi_dev);
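Lines 1374 and 1409 appear to bracket channel state processing with a runtime vote on the controller's own mhi_dev: mhi_device_get_sync() keeps the device awake and mhi_device_put() drops the vote. Clients can use the same pair around a burst of I/O; a sketch under that assumption:

    #include <linux/mhi.h>

    static int my_burst_io(struct mhi_device *mhi_dev)
    {
            int ret = mhi_device_get_sync(mhi_dev);

            if (ret)
                    return ret;
            /* ... queue transfers while the device is held awake ... */
            mhi_device_put(mhi_dev);
            return 0;
    }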
1418 struct device *dev = &mhi_chan->mhi_dev->dev;
1453 struct device *dev = &mhi_chan->mhi_dev->dev;
1542 struct device *dev = &mhi_cntrl->mhi_dev->dev;
1555 dev_err(&mhi_cntrl->mhi_dev->dev,
1607 mhi_chan->xfer_cb(mhi_chan->mhi_dev, &result);
1633 static int __mhi_prepare_for_transfer(struct mhi_device *mhi_dev, unsigned int flags)
1636 struct mhi_controller *mhi_cntrl = mhi_dev->mhi_cntrl;
1640 mhi_chan = dir ? mhi_dev->dl_chan : mhi_dev->ul_chan;
1653 mhi_chan = dir ? mhi_dev->dl_chan : mhi_dev->ul_chan;
1663 int mhi_prepare_for_transfer(struct mhi_device *mhi_dev)
1665 return __mhi_prepare_for_transfer(mhi_dev, 0);
1669 int mhi_prepare_for_transfer_autoqueue(struct mhi_device *mhi_dev)
1671 return __mhi_prepare_for_transfer(mhi_dev, MHI_CH_INBOUND_ALLOC_BUFS);
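Lines 1633-1671 show that both public entry points share __mhi_prepare_for_transfer(); the autoqueue variant passes MHI_CH_INBOUND_ALLOC_BUFS so the core pre-posts receive buffers on the DL channel. The usual call site is the client probe; a hypothetical one:

    #include <linux/mhi.h>

    static int my_mhi_probe(struct mhi_device *mhi_dev,
                            const struct mhi_device_id *id)
    {
            /* Starts the UL and DL channels; the autoqueue variant also
             * allocates and queues DL receive buffers on our behalf. */
            return mhi_prepare_for_transfer_autoqueue(mhi_dev);
    }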
1675 void mhi_unprepare_from_transfer(struct mhi_device *mhi_dev)
1677 struct mhi_controller *mhi_cntrl = mhi_dev->mhi_cntrl;
1682 mhi_chan = dir ? mhi_dev->ul_chan : mhi_dev->dl_chan;
1693 struct device *dev = &mhi_cntrl->mhi_dev->dev;
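mhi_unprepare_from_transfer() (1675-1682) is the teardown mirror, resetting the channels in the reverse order of prepare (DL first, then UL, per the ternary at 1682 versus 1640/1653). Tying the hypothetical pieces from the sketches above into one client skeleton (all my_* names are assumptions; the xfer_cb hooks correspond to the invocations at lines 659, 756 and 1607):

    #include <linux/mhi.h>
    #include <linux/module.h>

    static void my_ul_xfer_cb(struct mhi_device *mhi_dev,
                              struct mhi_result *result)
    {
            /* UL completion: the buffer queued earlier is finished. */
    }

    static void my_dl_xfer_cb(struct mhi_device *mhi_dev,
                              struct mhi_result *result)
    {
            /* result->buf_addr / result->bytes_xferd describe the
             * completed DL buffer. */
    }

    static void my_mhi_remove(struct mhi_device *mhi_dev)
    {
            mhi_unprepare_from_transfer(mhi_dev);
    }

    static struct mhi_driver my_mhi_driver = {
            .id_table = my_mhi_match_table,
            .probe = my_mhi_probe,
            .remove = my_mhi_remove,
            .status_cb = my_status_cb,
            .ul_xfer_cb = my_ul_xfer_cb,
            .dl_xfer_cb = my_dl_xfer_cb,
            .driver = {
                    .name = "my_mhi_client",        /* assumed name */
            },
    };
    module_mhi_driver(my_mhi_driver);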