Lines matching full:host (uses of the identifier 'host' in drivers/mmc/host/davinci_mmc.c)

20 #include <linux/mmc/host.h>
209 static void davinci_fifo_data_trans(struct mmc_davinci_host *host, in davinci_fifo_data_trans() argument
212 struct sg_mapping_iter *sgm = &host->sg_miter; in davinci_fifo_data_trans()
221 dev_err(mmc_dev(host->mmc), "ran out of sglist prematurely\n"); in davinci_fifo_data_trans()
233 if (host->data_dir == DAVINCI_MMC_DATADIR_WRITE) { in davinci_fifo_data_trans()
235 writel(*((u32 *)p), host->base + DAVINCI_MMCDXR); in davinci_fifo_data_trans()
239 iowrite8_rep(host->base + DAVINCI_MMCDXR, p, (n & 3)); in davinci_fifo_data_trans()
244 *((u32 *)p) = readl(host->base + DAVINCI_MMCDRR); in davinci_fifo_data_trans()
248 ioread8_rep(host->base + DAVINCI_MMCDRR, p, (n & 3)); in davinci_fifo_data_trans()
254 host->bytes_left -= n; in davinci_fifo_data_trans()
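
The davinci_fifo_data_trans() matches above outline the PIO path: an sg_mapping_iter walks the request's scatterlist, full 32-bit words move through MMCDXR (write) or MMCDRR (read), and iowrite8_rep()/ioread8_rep() handle the 1-3 trailing bytes. Below is a minimal reconstruction of that loop; it assumes the driver's own struct mmc_davinci_host, its DAVINCI_MMC_DATADIR_* values and register offsets, and simplifies the bookkeeping, so it is a sketch rather than the exact function.

#include <linux/io.h>
#include <linux/scatterlist.h>
#include <linux/mmc/host.h>

static void fifo_data_trans_sketch(struct mmc_davinci_host *host,
				   unsigned int n)
{
	struct sg_mapping_iter *sgm = &host->sg_miter;
	unsigned int i;
	u8 *p;

	/* Map the next piece of the scatterlist; a short list is a bug. */
	if (!sg_miter_next(sgm)) {
		dev_err(mmc_dev(host->mmc), "ran out of sglist prematurely\n");
		return;
	}

	p = sgm->addr;
	n = min_t(unsigned int, n, sgm->length);
	n = min_t(unsigned int, n, host->bytes_left);
	sgm->consumed = n;

	if (host->data_dir == DAVINCI_MMC_DATADIR_WRITE) {
		for (i = 0; i < (n >> 2); i++, p += 4)	/* whole words */
			writel(*(u32 *)p, host->base + DAVINCI_MMCDXR);
		if (n & 3)				/* 1-3 leftover bytes */
			iowrite8_rep(host->base + DAVINCI_MMCDXR, p, n & 3);
	} else {
		for (i = 0; i < (n >> 2); i++, p += 4)
			*(u32 *)p = readl(host->base + DAVINCI_MMCDRR);
		if (n & 3)
			ioread8_rep(host->base + DAVINCI_MMCDRR, p, n & 3);
	}

	host->bytes_left -= n;
}
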
257 static void mmc_davinci_start_command(struct mmc_davinci_host *host, in mmc_davinci_start_command() argument
263 dev_dbg(mmc_dev(host->mmc), "CMD%d, arg 0x%08x%s\n", in mmc_davinci_start_command()
283 host->cmd = cmd; in mmc_davinci_start_command()
304 dev_dbg(mmc_dev(host->mmc), "unknown resp_type %04x\n", in mmc_davinci_start_command()
313 if (host->do_dma) in mmc_davinci_start_command()
316 if (host->version == MMC_CTLR_VERSION_2 && host->data != NULL && in mmc_davinci_start_command()
317 host->data_dir == DAVINCI_MMC_DATADIR_READ) in mmc_davinci_start_command()
325 if (host->data_dir == DAVINCI_MMC_DATADIR_WRITE) in mmc_davinci_start_command()
328 if (host->bus_mode == MMC_BUSMODE_PUSHPULL) in mmc_davinci_start_command()
332 writel(0x1FFF, host->base + DAVINCI_MMCTOR); in mmc_davinci_start_command()
336 if (host->data_dir == DAVINCI_MMC_DATADIR_WRITE) { in mmc_davinci_start_command()
339 if (!host->do_dma) in mmc_davinci_start_command()
341 } else if (host->data_dir == DAVINCI_MMC_DATADIR_READ) { in mmc_davinci_start_command()
344 if (!host->do_dma) in mmc_davinci_start_command()
352 if (!host->do_dma && (host->data_dir == DAVINCI_MMC_DATADIR_WRITE)) in mmc_davinci_start_command()
353 davinci_fifo_data_trans(host, rw_threshold); in mmc_davinci_start_command()
355 writel(cmd->arg, host->base + DAVINCI_MMCARGHL); in mmc_davinci_start_command()
356 writel(cmd_reg, host->base + DAVINCI_MMCCMD); in mmc_davinci_start_command()
358 host->active_request = true; in mmc_davinci_start_command()
360 if (!host->do_dma && host->bytes_left <= poll_threshold) { in mmc_davinci_start_command()
363 while (host->active_request && count--) { in mmc_davinci_start_command()
364 mmc_davinci_irq(0, host); in mmc_davinci_start_command()
369 if (host->active_request) in mmc_davinci_start_command()
370 writel(im_val, host->base + DAVINCI_MMCIM); in mmc_davinci_start_command()
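
The tail of mmc_davinci_start_command() adds a small-transfer optimization: for PIO requests at or below poll_threshold bytes the driver calls its own interrupt handler in a bounded loop, and only unmasks the controller (MMCIM) if the request is still outstanding afterwards. A sketch of that pattern; poll_loopcount is an assumed bound and im_val the interrupt mask computed earlier in the function.

static void start_command_tail_sketch(struct mmc_davinci_host *host, u32 im_val)
{
	host->active_request = true;

	if (!host->do_dma && host->bytes_left <= poll_threshold) {
		u32 count = poll_loopcount;	/* assumed iteration bound */

		/* Service the controller synchronously; each pass drains or
		 * fills the FIFO and may complete the request outright. */
		while (host->active_request && count--)
			mmc_davinci_irq(0, host);
	}

	/* Still pending: fall back to interrupt-driven completion. */
	if (host->active_request)
		writel(im_val, host->base + DAVINCI_MMCIM);
}
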
377 static void davinci_abort_dma(struct mmc_davinci_host *host) in davinci_abort_dma() argument
381 if (host->data_dir == DAVINCI_MMC_DATADIR_READ) in davinci_abort_dma()
382 sync_dev = host->dma_rx; in davinci_abort_dma()
384 sync_dev = host->dma_tx; in davinci_abort_dma()
389 static int mmc_davinci_send_dma_request(struct mmc_davinci_host *host, in mmc_davinci_send_dma_request() argument
396 if (host->data_dir == DAVINCI_MMC_DATADIR_WRITE) { in mmc_davinci_send_dma_request()
399 .dst_addr = host->mem_res->start + DAVINCI_MMCDXR, in mmc_davinci_send_dma_request()
404 chan = host->dma_tx; in mmc_davinci_send_dma_request()
405 dmaengine_slave_config(host->dma_tx, &dma_tx_conf); in mmc_davinci_send_dma_request()
407 desc = dmaengine_prep_slave_sg(host->dma_tx, in mmc_davinci_send_dma_request()
409 host->sg_len, in mmc_davinci_send_dma_request()
413 dev_dbg(mmc_dev(host->mmc), in mmc_davinci_send_dma_request()
421 .src_addr = host->mem_res->start + DAVINCI_MMCDRR, in mmc_davinci_send_dma_request()
426 chan = host->dma_rx; in mmc_davinci_send_dma_request()
427 dmaengine_slave_config(host->dma_rx, &dma_rx_conf); in mmc_davinci_send_dma_request()
429 desc = dmaengine_prep_slave_sg(host->dma_rx, in mmc_davinci_send_dma_request()
431 host->sg_len, in mmc_davinci_send_dma_request()
435 dev_dbg(mmc_dev(host->mmc), in mmc_davinci_send_dma_request()
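
mmc_davinci_send_dma_request() follows the standard dmaengine slave flow: point the channel's device-side address at the FIFO register inside the controller's memory resource (MMCDXR for TX, MMCDRR for RX), prepare a slave scatter-gather descriptor over the mapped list, then submit and issue it. A condensed sketch of the write direction; the burst/width settings and the submit/issue calls are the usual dmaengine pattern, shown here as assumptions rather than matched lines.

#include <linux/dmaengine.h>

static int send_dma_write_sketch(struct mmc_davinci_host *host,
				 struct mmc_data *data)
{
	struct dma_slave_config cfg = {
		.direction	= DMA_MEM_TO_DEV,
		.dst_addr	= host->mem_res->start + DAVINCI_MMCDXR,
		.dst_addr_width	= DMA_SLAVE_BUSWIDTH_4_BYTES,
		.dst_maxburst	= rw_threshold / 4,	/* assumed burst size */
	};
	struct dma_async_tx_descriptor *desc;
	int ret;

	ret = dmaengine_slave_config(host->dma_tx, &cfg);
	if (ret)
		return ret;

	desc = dmaengine_prep_slave_sg(host->dma_tx, data->sg, host->sg_len,
				       DMA_MEM_TO_DEV,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc) {
		dev_dbg(mmc_dev(host->mmc), "failed to prep slave sg\n");
		return -ENOMEM;
	}

	dmaengine_submit(desc);
	dma_async_issue_pending(host->dma_tx);

	return 0;
}
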
449 static int mmc_davinci_start_dma_transfer(struct mmc_davinci_host *host, in mmc_davinci_start_dma_transfer() argument
456 host->sg_len = dma_map_sg(mmc_dev(host->mmc), data->sg, data->sg_len, in mmc_davinci_start_dma_transfer()
460 for (i = 0; i < host->sg_len; i++) { in mmc_davinci_start_dma_transfer()
462 dma_unmap_sg(mmc_dev(host->mmc), in mmc_davinci_start_dma_transfer()
469 host->do_dma = 1; in mmc_davinci_start_dma_transfer()
470 ret = mmc_davinci_send_dma_request(host, data); in mmc_davinci_start_dma_transfer()
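
mmc_davinci_start_dma_transfer() maps the scatterlist for DMA, then walks the mapped segments and bails out (unmapping again) if any segment fails a suitability check, which drops the request back onto the PIO path. The check itself is not visible in the matches; the sketch below assumes it is rw_threshold granularity, and assumes mmc_get_dma_dir() supplies the mapping direction.

#include <linux/dma-mapping.h>
#include <linux/mmc/host.h>

static int start_dma_transfer_sketch(struct mmc_davinci_host *host,
				     struct mmc_data *data)
{
	int i;

	host->sg_len = dma_map_sg(mmc_dev(host->mmc), data->sg, data->sg_len,
				  mmc_get_dma_dir(data));
	if (!host->sg_len)
		return -ENOMEM;

	/* Reject lists the FIFO cannot consume in whole bursts (assumed). */
	for (i = 0; i < host->sg_len; i++) {
		if (sg_dma_len(data->sg + i) & (rw_threshold - 1)) {
			dma_unmap_sg(mmc_dev(host->mmc), data->sg,
				     data->sg_len, mmc_get_dma_dir(data));
			return -1;
		}
	}

	host->do_dma = 1;
	return mmc_davinci_send_dma_request(host, data);
}
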
475 static void davinci_release_dma_channels(struct mmc_davinci_host *host) in davinci_release_dma_channels() argument
477 if (!host->use_dma) in davinci_release_dma_channels()
480 dma_release_channel(host->dma_tx); in davinci_release_dma_channels()
481 dma_release_channel(host->dma_rx); in davinci_release_dma_channels()
484 static int davinci_acquire_dma_channels(struct mmc_davinci_host *host) in davinci_acquire_dma_channels() argument
486 host->dma_tx = dma_request_chan(mmc_dev(host->mmc), "tx"); in davinci_acquire_dma_channels()
487 if (IS_ERR(host->dma_tx)) { in davinci_acquire_dma_channels()
488 dev_err(mmc_dev(host->mmc), "Can't get dma_tx channel\n"); in davinci_acquire_dma_channels()
489 return PTR_ERR(host->dma_tx); in davinci_acquire_dma_channels()
492 host->dma_rx = dma_request_chan(mmc_dev(host->mmc), "rx"); in davinci_acquire_dma_channels()
493 if (IS_ERR(host->dma_rx)) { in davinci_acquire_dma_channels()
494 dev_err(mmc_dev(host->mmc), "Can't get dma_rx channel\n"); in davinci_acquire_dma_channels()
495 dma_release_channel(host->dma_tx); in davinci_acquire_dma_channels()
496 return PTR_ERR(host->dma_rx); in davinci_acquire_dma_channels()
505 mmc_davinci_prepare_data(struct mmc_davinci_host *host, struct mmc_request *req) in mmc_davinci_prepare_data() argument
512 if (host->version == MMC_CTLR_VERSION_2) in mmc_davinci_prepare_data()
515 host->data = data; in mmc_davinci_prepare_data()
517 host->data_dir = DAVINCI_MMC_DATADIR_NONE; in mmc_davinci_prepare_data()
518 writel(0, host->base + DAVINCI_MMCBLEN); in mmc_davinci_prepare_data()
519 writel(0, host->base + DAVINCI_MMCNBLK); in mmc_davinci_prepare_data()
523 dev_dbg(mmc_dev(host->mmc), "%s, %d blocks of %d bytes\n", in mmc_davinci_prepare_data()
526 dev_dbg(mmc_dev(host->mmc), " DTO %d cycles + %d ns\n", in mmc_davinci_prepare_data()
529 (data->timeout_ns / host->ns_in_one_cycle); in mmc_davinci_prepare_data()
533 writel(timeout, host->base + DAVINCI_MMCTOD); in mmc_davinci_prepare_data()
534 writel(data->blocks, host->base + DAVINCI_MMCNBLK); in mmc_davinci_prepare_data()
535 writel(data->blksz, host->base + DAVINCI_MMCBLEN); in mmc_davinci_prepare_data()
540 host->data_dir = DAVINCI_MMC_DATADIR_WRITE; in mmc_davinci_prepare_data()
542 host->base + DAVINCI_MMCFIFOCTL); in mmc_davinci_prepare_data()
544 host->base + DAVINCI_MMCFIFOCTL); in mmc_davinci_prepare_data()
547 host->data_dir = DAVINCI_MMC_DATADIR_READ; in mmc_davinci_prepare_data()
549 host->base + DAVINCI_MMCFIFOCTL); in mmc_davinci_prepare_data()
551 host->base + DAVINCI_MMCFIFOCTL); in mmc_davinci_prepare_data()
554 host->bytes_left = data->blocks * data->blksz; in mmc_davinci_prepare_data()
564 if (host->use_dma && (host->bytes_left & (rw_threshold - 1)) == 0 in mmc_davinci_prepare_data()
565 && mmc_davinci_start_dma_transfer(host, data) == 0) { in mmc_davinci_prepare_data()
567 host->bytes_left = 0; in mmc_davinci_prepare_data()
570 host->sg_len = data->sg_len; in mmc_davinci_prepare_data()
571 sg_miter_start(&host->sg_miter, data->sg, data->sg_len, flags); in mmc_davinci_prepare_data()
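
The end of mmc_davinci_prepare_data() picks the transfer path: if DMA is enabled and the total byte count is a multiple of rw_threshold, it hands the request to DMA and zeroes bytes_left; otherwise it arms the sg_mapping_iter that the PIO FIFO loop consumes. Sketch of that decision; the SG_MITER flag selection is an assumption based on the transfer direction.

static void prepare_data_tail_sketch(struct mmc_davinci_host *host,
				     struct mmc_data *data)
{
	unsigned int flags = SG_MITER_ATOMIC |
		(host->data_dir == DAVINCI_MMC_DATADIR_WRITE ?
		 SG_MITER_FROM_SG : SG_MITER_TO_SG);

	host->bytes_left = data->blocks * data->blksz;

	if (host->use_dma &&
	    (host->bytes_left & (rw_threshold - 1)) == 0 &&
	    mmc_davinci_start_dma_transfer(host, data) == 0) {
		/* DMA owns the whole transfer; nothing left for PIO. */
		host->bytes_left = 0;
	} else {
		/* PIO fallback: the FIFO loop walks this iterator. */
		host->sg_len = data->sg_len;
		sg_miter_start(&host->sg_miter, data->sg, data->sg_len, flags);
	}
}
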
577 struct mmc_davinci_host *host = mmc_priv(mmc); in mmc_davinci_request() local
585 mmcst1 = readl(host->base + DAVINCI_MMCST1); in mmc_davinci_request()
591 dev_err(mmc_dev(host->mmc), "still BUSY? bad ... \n"); in mmc_davinci_request()
597 host->do_dma = 0; in mmc_davinci_request()
598 mmc_davinci_prepare_data(host, req); in mmc_davinci_request()
599 mmc_davinci_start_command(host, req->cmd); in mmc_davinci_request()
602 static unsigned int calculate_freq_for_card(struct mmc_davinci_host *host, in calculate_freq_for_card() argument
607 mmc_pclk = host->mmc_input_clk; in calculate_freq_for_card()
621 host->ns_in_one_cycle = (1000000) / (((mmc_pclk in calculate_freq_for_card()
624 host->ns_in_one_cycle = (1000000) / (((mmc_pclk in calculate_freq_for_card()
634 struct mmc_davinci_host *host = mmc_priv(mmc); in calculate_clk_divider() local
648 temp = readl(host->base + DAVINCI_MMCCLK) & ~MMCCLK_CLKRT_MASK; in calculate_clk_divider()
650 writel(temp, host->base + DAVINCI_MMCCLK); in calculate_clk_divider()
653 host->ns_in_one_cycle = (1000000) / (MMCSD_INIT_CLOCK/1000); in calculate_clk_divider()
656 mmc_push_pull_freq = calculate_freq_for_card(host, ios->clock); in calculate_clk_divider()
661 temp = readl(host->base + DAVINCI_MMCCLK) & ~MMCCLK_CLKEN; in calculate_clk_divider()
662 writel(temp, host->base + DAVINCI_MMCCLK); in calculate_clk_divider()
666 temp = readl(host->base + DAVINCI_MMCCLK) & ~MMCCLK_CLKRT_MASK; in calculate_clk_divider()
668 writel(temp, host->base + DAVINCI_MMCCLK); in calculate_clk_divider()
670 writel(temp | MMCCLK_CLKEN, host->base + DAVINCI_MMCCLK); in calculate_clk_divider()
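
calculate_clk_divider() reprograms MMCCLK in a glitch-free order: gate the card clock (clear MMCCLK_CLKEN), install the new divider in the MMCCLK_CLKRT_MASK field, then re-enable the clock. Sketch of that sequence; divisor stands for the value calculate_freq_for_card() returns, and any settling delays between the writes are omitted.

static void set_mmc_divider_sketch(struct mmc_davinci_host *host, u32 divisor)
{
	u32 temp;

	/* Gate the card clock before touching the rate field. */
	temp = readl(host->base + DAVINCI_MMCCLK) & ~MMCCLK_CLKEN;
	writel(temp, host->base + DAVINCI_MMCCLK);

	/* Install the new divider. */
	temp = readl(host->base + DAVINCI_MMCCLK) & ~MMCCLK_CLKRT_MASK;
	temp |= divisor;
	writel(temp, host->base + DAVINCI_MMCCLK);

	/* Ungate with the divider in place. */
	writel(temp | MMCCLK_CLKEN, host->base + DAVINCI_MMCCLK);
}
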
678 struct mmc_davinci_host *host = mmc_priv(mmc); in mmc_davinci_set_ios() local
682 dev_dbg(mmc_dev(host->mmc), in mmc_davinci_set_ios()
700 dev_dbg(mmc_dev(host->mmc), "Enabling 8 bit mode\n"); in mmc_davinci_set_ios()
701 writel((readl(host->base + DAVINCI_MMCCTL) & in mmc_davinci_set_ios()
703 host->base + DAVINCI_MMCCTL); in mmc_davinci_set_ios()
706 dev_dbg(mmc_dev(host->mmc), "Enabling 4 bit mode\n"); in mmc_davinci_set_ios()
707 if (host->version == MMC_CTLR_VERSION_2) in mmc_davinci_set_ios()
708 writel((readl(host->base + DAVINCI_MMCCTL) & in mmc_davinci_set_ios()
710 host->base + DAVINCI_MMCCTL); in mmc_davinci_set_ios()
712 writel(readl(host->base + DAVINCI_MMCCTL) | in mmc_davinci_set_ios()
714 host->base + DAVINCI_MMCCTL); in mmc_davinci_set_ios()
717 dev_dbg(mmc_dev(host->mmc), "Enabling 1 bit mode\n"); in mmc_davinci_set_ios()
718 if (host->version == MMC_CTLR_VERSION_2) in mmc_davinci_set_ios()
719 writel(readl(host->base + DAVINCI_MMCCTL) & in mmc_davinci_set_ios()
721 host->base + DAVINCI_MMCCTL); in mmc_davinci_set_ios()
723 writel(readl(host->base + DAVINCI_MMCCTL) & in mmc_davinci_set_ios()
725 host->base + DAVINCI_MMCCTL); in mmc_davinci_set_ios()
731 host->bus_mode = ios->bus_mode; in mmc_davinci_set_ios()
737 writel(0, host->base + DAVINCI_MMCARGHL); in mmc_davinci_set_ios()
738 writel(MMCCMD_INITCK, host->base + DAVINCI_MMCCMD); in mmc_davinci_set_ios()
740 u32 tmp = readl(host->base + DAVINCI_MMCST0); in mmc_davinci_set_ios()
749 dev_warn(mmc_dev(host->mmc), "powerup timeout\n"); in mmc_davinci_set_ios()
756 mmc_davinci_xfer_done(struct mmc_davinci_host *host, struct mmc_data *data) in mmc_davinci_xfer_done() argument
758 host->data = NULL; in mmc_davinci_xfer_done()
760 if (host->mmc->caps & MMC_CAP_SDIO_IRQ) { in mmc_davinci_xfer_done()
766 if (host->sdio_int && !(readl(host->base + DAVINCI_SDIOST0) & in mmc_davinci_xfer_done()
768 writel(SDIOIST_IOINT, host->base + DAVINCI_SDIOIST); in mmc_davinci_xfer_done()
769 mmc_signal_sdio_irq(host->mmc); in mmc_davinci_xfer_done()
773 if (host->do_dma) { in mmc_davinci_xfer_done()
774 davinci_abort_dma(host); in mmc_davinci_xfer_done()
776 dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len, in mmc_davinci_xfer_done()
778 host->do_dma = false; in mmc_davinci_xfer_done()
780 host->data_dir = DAVINCI_MMC_DATADIR_NONE; in mmc_davinci_xfer_done()
782 if (!data->stop || (host->cmd && host->cmd->error)) { in mmc_davinci_xfer_done()
783 mmc_request_done(host->mmc, data->mrq); in mmc_davinci_xfer_done()
784 writel(0, host->base + DAVINCI_MMCIM); in mmc_davinci_xfer_done()
785 host->active_request = false; in mmc_davinci_xfer_done()
787 mmc_davinci_start_command(host, data->stop); in mmc_davinci_xfer_done()
790 static void mmc_davinci_cmd_done(struct mmc_davinci_host *host, in mmc_davinci_cmd_done() argument
793 host->cmd = NULL; in mmc_davinci_cmd_done()
798 cmd->resp[3] = readl(host->base + DAVINCI_MMCRSP01); in mmc_davinci_cmd_done()
799 cmd->resp[2] = readl(host->base + DAVINCI_MMCRSP23); in mmc_davinci_cmd_done()
800 cmd->resp[1] = readl(host->base + DAVINCI_MMCRSP45); in mmc_davinci_cmd_done()
801 cmd->resp[0] = readl(host->base + DAVINCI_MMCRSP67); in mmc_davinci_cmd_done()
804 cmd->resp[0] = readl(host->base + DAVINCI_MMCRSP67); in mmc_davinci_cmd_done()
808 if (host->data == NULL || cmd->error) { in mmc_davinci_cmd_done()
811 mmc_request_done(host->mmc, cmd->mrq); in mmc_davinci_cmd_done()
812 writel(0, host->base + DAVINCI_MMCIM); in mmc_davinci_cmd_done()
813 host->active_request = false; in mmc_davinci_cmd_done()
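
mmc_davinci_cmd_done() documents the response register layout: a 136-bit response is spread across MMCRSP01..MMCRSP67 with the most significant word in MMCRSP67 (which the MMC core expects in resp[0]), while a short 48-bit response is read from MMCRSP67 alone. Sketch of just the readback, using the core's response flags.

#include <linux/mmc/core.h>

static void read_response_sketch(struct mmc_davinci_host *host,
				 struct mmc_command *cmd)
{
	if (!(cmd->flags & MMC_RSP_PRESENT))
		return;

	if (cmd->flags & MMC_RSP_136) {
		/* 128 response bits across four registers, MSW last. */
		cmd->resp[3] = readl(host->base + DAVINCI_MMCRSP01);
		cmd->resp[2] = readl(host->base + DAVINCI_MMCRSP23);
		cmd->resp[1] = readl(host->base + DAVINCI_MMCRSP45);
		cmd->resp[0] = readl(host->base + DAVINCI_MMCRSP67);
	} else {
		/* Short response: a single 32-bit word. */
		cmd->resp[0] = readl(host->base + DAVINCI_MMCRSP67);
	}
}
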
817 static inline void mmc_davinci_reset_ctrl(struct mmc_davinci_host *host, in mmc_davinci_reset_ctrl() argument
822 temp = readl(host->base + DAVINCI_MMCCTL); in mmc_davinci_reset_ctrl()
828 writel(temp, host->base + DAVINCI_MMCCTL); in mmc_davinci_reset_ctrl()
833 davinci_abort_data(struct mmc_davinci_host *host, struct mmc_data *data) in davinci_abort_data() argument
835 mmc_davinci_reset_ctrl(host, 1); in davinci_abort_data()
836 mmc_davinci_reset_ctrl(host, 0); in davinci_abort_data()
837 if (!host->do_dma) in davinci_abort_data()
838 sg_miter_stop(&host->sg_miter); in davinci_abort_data()
843 struct mmc_davinci_host *host = dev_id; in mmc_davinci_sdio_irq() local
846 status = readl(host->base + DAVINCI_SDIOIST); in mmc_davinci_sdio_irq()
848 dev_dbg(mmc_dev(host->mmc), in mmc_davinci_sdio_irq()
850 writel(status | SDIOIST_IOINT, host->base + DAVINCI_SDIOIST); in mmc_davinci_sdio_irq()
851 mmc_signal_sdio_irq(host->mmc); in mmc_davinci_sdio_irq()
858 struct mmc_davinci_host *host = (struct mmc_davinci_host *)dev_id; in mmc_davinci_irq() local
862 struct mmc_data *data = host->data; in mmc_davinci_irq()
864 if (host->cmd == NULL && host->data == NULL) { in mmc_davinci_irq()
865 status = readl(host->base + DAVINCI_MMCST0); in mmc_davinci_irq()
866 dev_dbg(mmc_dev(host->mmc), in mmc_davinci_irq()
869 writel(0, host->base + DAVINCI_MMCIM); in mmc_davinci_irq()
873 status = readl(host->base + DAVINCI_MMCST0); in mmc_davinci_irq()
883 if (host->bytes_left && (status & (MMCST0_DXRDY | MMCST0_DRRDY))) { in mmc_davinci_irq()
893 im_val = readl(host->base + DAVINCI_MMCIM); in mmc_davinci_irq()
894 writel(0, host->base + DAVINCI_MMCIM); in mmc_davinci_irq()
897 davinci_fifo_data_trans(host, rw_threshold); in mmc_davinci_irq()
898 status = readl(host->base + DAVINCI_MMCST0); in mmc_davinci_irq()
900 } while (host->bytes_left && in mmc_davinci_irq()
909 writel(im_val, host->base + DAVINCI_MMCIM); in mmc_davinci_irq()
915 if (!host->do_dma) { in mmc_davinci_irq()
916 if (host->bytes_left > 0) in mmc_davinci_irq()
920 davinci_fifo_data_trans(host, host->bytes_left); in mmc_davinci_irq()
921 sg_miter_stop(&host->sg_miter); in mmc_davinci_irq()
926 dev_err(mmc_dev(host->mmc), in mmc_davinci_irq()
927 "DATDNE with no host->data\n"); in mmc_davinci_irq()
936 dev_dbg(mmc_dev(host->mmc), in mmc_davinci_irq()
940 davinci_abort_data(host, data); in mmc_davinci_irq()
955 u32 temp = readb(host->base + DAVINCI_MMCDRSP); in mmc_davinci_irq()
960 dev_dbg(mmc_dev(host->mmc), "data %s %s error\n", in mmc_davinci_irq()
964 davinci_abort_data(host, data); in mmc_davinci_irq()
969 if (host->cmd) { in mmc_davinci_irq()
970 dev_dbg(mmc_dev(host->mmc), in mmc_davinci_irq()
972 host->cmd->opcode, qstatus); in mmc_davinci_irq()
973 host->cmd->error = -ETIMEDOUT; in mmc_davinci_irq()
976 davinci_abort_data(host, data); in mmc_davinci_irq()
984 dev_dbg(mmc_dev(host->mmc), "Command CRC error\n"); in mmc_davinci_irq()
985 if (host->cmd) { in mmc_davinci_irq()
986 host->cmd->error = -EILSEQ; in mmc_davinci_irq()
993 end_command = host->cmd ? 1 : 0; in mmc_davinci_irq()
997 mmc_davinci_cmd_done(host, host->cmd); in mmc_davinci_irq()
999 mmc_davinci_xfer_done(host, data); in mmc_davinci_irq()
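
One detail worth isolating from mmc_davinci_irq(): when DXRDY/DRRDY is set during a PIO transfer, the handler saves and clears the interrupt mask (MMCIM), works the FIFO in a tight loop for as long as data remains and the ready bits stay set, then restores the mask. Sketch of that inner loop, lifted from the fragments above.

static u32 irq_pio_service_sketch(struct mmc_davinci_host *host, u32 status)
{
	u32 im_val;

	/* Mask the controller while we work the FIFO by hand. */
	im_val = readl(host->base + DAVINCI_MMCIM);
	writel(0, host->base + DAVINCI_MMCIM);

	do {
		davinci_fifo_data_trans(host, rw_threshold);
		status = readl(host->base + DAVINCI_MMCST0);
	} while (host->bytes_left &&
		 (status & (MMCST0_DXRDY | MMCST0_DRRDY)));

	/* Re-arm the interrupt sources we masked above. */
	writel(im_val, host->base + DAVINCI_MMCIM);

	return status;
}
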
1027 struct mmc_davinci_host *host = mmc_priv(mmc); in mmc_davinci_enable_sdio_irq() local
1030 if (!(readl(host->base + DAVINCI_SDIOST0) & SDIOST0_DAT1_HI)) { in mmc_davinci_enable_sdio_irq()
1031 writel(SDIOIST_IOINT, host->base + DAVINCI_SDIOIST); in mmc_davinci_enable_sdio_irq()
1032 mmc_signal_sdio_irq(host->mmc); in mmc_davinci_enable_sdio_irq()
1034 host->sdio_int = true; in mmc_davinci_enable_sdio_irq()
1035 writel(readl(host->base + DAVINCI_SDIOIEN) | in mmc_davinci_enable_sdio_irq()
1036 SDIOIEN_IOINTEN, host->base + DAVINCI_SDIOIEN); in mmc_davinci_enable_sdio_irq()
1039 host->sdio_int = false; in mmc_davinci_enable_sdio_irq()
1040 writel(readl(host->base + DAVINCI_SDIOIEN) & ~SDIOIEN_IOINTEN, in mmc_davinci_enable_sdio_irq()
1041 host->base + DAVINCI_SDIOIEN); in mmc_davinci_enable_sdio_irq()
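
mmc_davinci_enable_sdio_irq() handles the usual SDIO enable race: if DAT1 is already low when the core asks to enable the card interrupt, the driver acks SDIOIST_IOINT and signals the core immediately; otherwise it records sdio_int and arms SDIOIEN_IOINTEN. Disable simply clears both. The reconstruction below stitches the fragments together; the exact if/else structure is an assumption.

static void enable_sdio_irq_sketch(struct mmc_host *mmc, int enable)
{
	struct mmc_davinci_host *host = mmc_priv(mmc);

	if (enable) {
		if (!(readl(host->base + DAVINCI_SDIOST0) & SDIOST0_DAT1_HI)) {
			/* Interrupt already asserted: ack and report it now. */
			writel(SDIOIST_IOINT, host->base + DAVINCI_SDIOIST);
			mmc_signal_sdio_irq(host->mmc);
		} else {
			host->sdio_int = true;
			writel(readl(host->base + DAVINCI_SDIOIEN) |
			       SDIOIEN_IOINTEN, host->base + DAVINCI_SDIOIEN);
		}
	} else {
		host->sdio_int = false;
		writel(readl(host->base + DAVINCI_SDIOIEN) & ~SDIOIEN_IOINTEN,
		       host->base + DAVINCI_SDIOIEN);
	}
}
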
1059 struct mmc_davinci_host *host; in mmc_davinci_cpufreq_transition() local
1064 host = container_of(nb, struct mmc_davinci_host, freq_transition); in mmc_davinci_cpufreq_transition()
1065 mmc = host->mmc; in mmc_davinci_cpufreq_transition()
1066 mmc_pclk = clk_get_rate(host->clk); in mmc_davinci_cpufreq_transition()
1070 host->mmc_input_clk = mmc_pclk; in mmc_davinci_cpufreq_transition()
1078 static inline int mmc_davinci_cpufreq_register(struct mmc_davinci_host *host) in mmc_davinci_cpufreq_register() argument
1080 host->freq_transition.notifier_call = mmc_davinci_cpufreq_transition; in mmc_davinci_cpufreq_register()
1082 return cpufreq_register_notifier(&host->freq_transition, in mmc_davinci_cpufreq_register()
1086 static inline void mmc_davinci_cpufreq_deregister(struct mmc_davinci_host *host) in mmc_davinci_cpufreq_deregister() argument
1088 cpufreq_unregister_notifier(&host->freq_transition, in mmc_davinci_cpufreq_deregister()
1092 static inline int mmc_davinci_cpufreq_register(struct mmc_davinci_host *host) in mmc_davinci_cpufreq_register() argument
1097 static inline void mmc_davinci_cpufreq_deregister(struct mmc_davinci_host *host) in mmc_davinci_cpufreq_deregister() argument
1101 static void init_mmcsd_host(struct mmc_davinci_host *host) in init_mmcsd_host() argument
1104 mmc_davinci_reset_ctrl(host, 1); in init_mmcsd_host()
1106 writel(0, host->base + DAVINCI_MMCCLK); in init_mmcsd_host()
1107 writel(MMCCLK_CLKEN, host->base + DAVINCI_MMCCLK); in init_mmcsd_host()
1109 writel(0x1FFF, host->base + DAVINCI_MMCTOR); in init_mmcsd_host()
1110 writel(0xFFFF, host->base + DAVINCI_MMCTOD); in init_mmcsd_host()
1112 mmc_davinci_reset_ctrl(host, 0); in init_mmcsd_host()
1144 struct mmc_davinci_host *host; in mmc_davinci_parse_pdata() local
1150 host = mmc_priv(mmc); in mmc_davinci_parse_pdata()
1151 if (!host) in mmc_davinci_parse_pdata()
1155 host->nr_sg = pdata->nr_sg - 1; in mmc_davinci_parse_pdata()
1186 struct mmc_davinci_host *host = NULL; in davinci_mmcsd_probe() local
1206 mmc = devm_mmc_alloc_host(&pdev->dev, sizeof(*host)); in davinci_mmcsd_probe()
1210 host = mmc_priv(mmc); in davinci_mmcsd_probe()
1211 host->mmc = mmc; /* Important */ in davinci_mmcsd_probe()
1213 host->mem_res = mem; in davinci_mmcsd_probe()
1214 host->base = devm_ioremap(&pdev->dev, mem->start, mem_size); in davinci_mmcsd_probe()
1215 if (!host->base) in davinci_mmcsd_probe()
1218 host->clk = devm_clk_get(&pdev->dev, NULL); in davinci_mmcsd_probe()
1219 if (IS_ERR(host->clk)) in davinci_mmcsd_probe()
1220 return PTR_ERR(host->clk); in davinci_mmcsd_probe()
1222 ret = clk_prepare_enable(host->clk); in davinci_mmcsd_probe()
1226 host->mmc_input_clk = clk_get_rate(host->clk); in davinci_mmcsd_probe()
1244 if (host->nr_sg > MAX_NR_SG || !host->nr_sg) in davinci_mmcsd_probe()
1245 host->nr_sg = MAX_NR_SG; in davinci_mmcsd_probe()
1247 init_mmcsd_host(host); in davinci_mmcsd_probe()
1249 host->use_dma = use_dma; in davinci_mmcsd_probe()
1250 host->mmc_irq = irq; in davinci_mmcsd_probe()
1251 host->sdio_irq = platform_get_irq_optional(pdev, 1); in davinci_mmcsd_probe()
1253 if (host->use_dma) { in davinci_mmcsd_probe()
1254 ret = davinci_acquire_dma_channels(host); in davinci_mmcsd_probe()
1258 host->use_dma = 0; in davinci_mmcsd_probe()
1265 host->version = id_entry->driver_data; in davinci_mmcsd_probe()
1284 dev_dbg(mmc_dev(host->mmc), "max_segs=%d\n", mmc->max_segs); in davinci_mmcsd_probe()
1285 dev_dbg(mmc_dev(host->mmc), "max_blk_size=%d\n", mmc->max_blk_size); in davinci_mmcsd_probe()
1286 dev_dbg(mmc_dev(host->mmc), "max_req_size=%d\n", mmc->max_req_size); in davinci_mmcsd_probe()
1287 dev_dbg(mmc_dev(host->mmc), "max_seg_size=%d\n", mmc->max_seg_size); in davinci_mmcsd_probe()
1289 platform_set_drvdata(pdev, host); in davinci_mmcsd_probe()
1291 ret = mmc_davinci_cpufreq_register(host); in davinci_mmcsd_probe()
1302 mmc_hostname(mmc), host); in davinci_mmcsd_probe()
1306 if (host->sdio_irq >= 0) { in davinci_mmcsd_probe()
1307 ret = devm_request_irq(&pdev->dev, host->sdio_irq, in davinci_mmcsd_probe()
1309 mmc_hostname(mmc), host); in davinci_mmcsd_probe()
1322 dev_info(mmc_dev(host->mmc), "Using %s, %d-bit mode\n", in davinci_mmcsd_probe()
1323 host->use_dma ? "DMA" : "PIO", bus_width); in davinci_mmcsd_probe()
1330 mmc_davinci_cpufreq_deregister(host); in davinci_mmcsd_probe()
1332 davinci_release_dma_channels(host); in davinci_mmcsd_probe()
1335 clk_disable_unprepare(host->clk); in davinci_mmcsd_probe()
1342 struct mmc_davinci_host *host = platform_get_drvdata(pdev); in davinci_mmcsd_remove() local
1344 mmc_remove_host(host->mmc); in davinci_mmcsd_remove()
1345 mmc_davinci_cpufreq_deregister(host); in davinci_mmcsd_remove()
1346 davinci_release_dma_channels(host); in davinci_mmcsd_remove()
1347 clk_disable_unprepare(host->clk); in davinci_mmcsd_remove()
1353 struct mmc_davinci_host *host = dev_get_drvdata(dev); in davinci_mmcsd_suspend() local
1355 writel(0, host->base + DAVINCI_MMCIM); in davinci_mmcsd_suspend()
1356 mmc_davinci_reset_ctrl(host, 1); in davinci_mmcsd_suspend()
1357 clk_disable(host->clk); in davinci_mmcsd_suspend()
1364 struct mmc_davinci_host *host = dev_get_drvdata(dev); in davinci_mmcsd_resume() local
1367 ret = clk_enable(host->clk); in davinci_mmcsd_resume()
1371 mmc_davinci_reset_ctrl(host, 0); in davinci_mmcsd_resume()
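
The suspend/resume fragments pair up cleanly: suspend masks every controller interrupt, holds the block in reset, and gates the functional clock; resume re-enables the clock and releases the reset. Sketch assuming the dev_pm_ops callbacks seen above.

static int mmcsd_suspend_sketch(struct device *dev)
{
	struct mmc_davinci_host *host = dev_get_drvdata(dev);

	writel(0, host->base + DAVINCI_MMCIM);	/* mask all interrupts */
	mmc_davinci_reset_ctrl(host, 1);	/* hold controller in reset */
	clk_disable(host->clk);

	return 0;
}

static int mmcsd_resume_sketch(struct device *dev)
{
	struct mmc_davinci_host *host = dev_get_drvdata(dev);
	int ret;

	ret = clk_enable(host->clk);
	if (ret)
		return ret;

	mmc_davinci_reset_ctrl(host, 0);	/* release reset */

	return 0;
}
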