ufs-exynos.c: Exynos UFS host controller driver (excerpt; lines matching "ufs-phy")
// SPDX-License-Identifier: GPL-2.0-only
/*
 * UFS Host Controller driver for Exynos specific extensions
 *
 * Copyright (C) 2014-2015 Samsung Electronics Co., Ltd.
 */

#include <linux/arm-smccc.h>
#include <linux/phy/phy.h>
#include <ufs/ufshcd.h>
#include "ufshcd-pltfrm.h"
#include <ufs/ufshci.h>
#include <ufs/unipro.h>

#include "ufs-exynos.h"

/* FSYS UFS Shareability */

/* Multi-host registers */

        UNIPRO_L1_5 = 0,        /* PHY Adapter */

/* UFS Protector registers */
static void exynos_ufs_auto_ctrl_hcc(struct exynos_ufs *ufs, bool en);
static void exynos_ufs_ctrl_clkstop(struct exynos_ufs *ufs, bool en);

static inline void exynos_ufs_enable_auto_ctrl_hcc(struct exynos_ufs *ufs)
        exynos_ufs_auto_ctrl_hcc(ufs, true);

static inline void exynos_ufs_disable_auto_ctrl_hcc(struct exynos_ufs *ufs)
        exynos_ufs_auto_ctrl_hcc(ufs, false);

/* in exynos_ufs_disable_auto_ctrl_hcc_save(struct exynos_ufs *ufs, u32 *val): */
        *val = hci_readl(ufs, HCI_MISC);
        exynos_ufs_auto_ctrl_hcc(ufs, false);

/* in exynos_ufs_auto_ctrl_hcc_restore(struct exynos_ufs *ufs, u32 *val): */
        hci_writel(ufs, *val, HCI_MISC);

static inline void exynos_ufs_gate_clks(struct exynos_ufs *ufs)
        exynos_ufs_ctrl_clkstop(ufs, true);

static inline void exynos_ufs_ungate_clks(struct exynos_ufs *ufs)
        exynos_ufs_ctrl_clkstop(ufs, false);

static int exynos7_ufs_drv_init(struct device *dev, struct exynos_ufs *ufs)
static int exynosauto_ufs_drv_init(struct device *dev, struct exynos_ufs *ufs)
        struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;

        if (ufs->sysreg) {
                return regmap_update_bits(ufs->sysreg,
                                          ufs->shareability_reg_offset,

        attr->tx_dif_p_nsec = 3200000;

static int exynosauto_ufs_post_hce_enable(struct exynos_ufs *ufs)
        struct ufs_hba *hba = ufs->hba;

        hci_writel(ufs, ALLOW_TRANS_VH_DEFAULT, HCI_MH_ALLOWABLE_TRAN_OF_VH);
        hci_writel(ufs, 0x1, HCI_MH_IID_IN_TASK_TAG);

static int exynosauto_ufs_pre_link(struct exynos_ufs *ufs)
        struct ufs_hba *hba = ufs->hba;

        rx_line_reset_period = (RX_LINE_RESET_TIME * ufs->mclk_rate) / NSEC_PER_MSEC;
        tx_line_reset_period = (TX_LINE_RESET_TIME * ufs->mclk_rate) / NSEC_PER_MSEC;

        for_each_ufs_rx_lane(ufs, i) {
                               DIV_ROUND_UP(NSEC_PER_SEC, ufs->mclk_rate));

        for_each_ufs_tx_lane(ufs, i) {
                               DIV_ROUND_UP(NSEC_PER_SEC, ufs->mclk_rate));

static int exynosauto_ufs_pre_pwr_change(struct exynos_ufs *ufs,
        struct ufs_hba *hba = ufs->hba;

static int exynosauto_ufs_post_pwr_change(struct exynos_ufs *ufs,
        struct ufs_hba *hba = ufs->hba;
static int exynos7_ufs_pre_link(struct exynos_ufs *ufs)
        struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;
        u32 val = attr->pa_dbg_opt_suite1_val;
        struct ufs_hba *hba = ufs->hba;

        for_each_ufs_tx_lane(ufs, i)
        for_each_ufs_rx_lane(ufs, i) {
        for_each_ufs_tx_lane(ufs, i)

        ufshcd_dme_set(hba, UIC_ARG_MIB(attr->pa_dbg_opt_suite1_off),
        ufshcd_dme_set(hba, UIC_ARG_MIB(attr->pa_dbg_opt_suite1_off), val);

static int exynos7_ufs_post_link(struct exynos_ufs *ufs)
        struct ufs_hba *hba = ufs->hba;

        for_each_ufs_tx_lane(ufs, i) {
                        TX_LINERESET_N(exynos_ufs_calc_time_cntr(ufs, 200000)));

static int exynos7_ufs_pre_pwr_change(struct exynos_ufs *ufs,
        unipro_writel(ufs, 0x22, UNIPRO_DBG_FORCE_DME_CTRL_STATE);

static int exynos7_ufs_post_pwr_change(struct exynos_ufs *ufs,
        struct ufs_hba *hba = ufs->hba;
        int lanes = max_t(u32, pwr->lane_rx, pwr->lane_tx);
/*
 * exynos_ufs_auto_ctrl_hcc - HCI core clock control by h/w
 *  - Before host controller S/W reset
 *  - Access to UFS protector's register
 */
static void exynos_ufs_auto_ctrl_hcc(struct exynos_ufs *ufs, bool en)
        u32 misc = hci_readl(ufs, HCI_MISC);

                hci_writel(ufs, misc | HCI_CORECLK_CTRL_EN, HCI_MISC);
                hci_writel(ufs, misc & ~HCI_CORECLK_CTRL_EN, HCI_MISC);
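/*
 * A minimal usage sketch (not taken verbatim from this driver): automatic
 * core-clock control must be off around a host S/W reset and around accesses
 * to the UFS protector registers, so callers bracket those sections with the
 * save/restore helpers declared above:
 *
 *      u32 saved_misc;
 *
 *      exynos_ufs_disable_auto_ctrl_hcc_save(ufs, &saved_misc);
 *      ... ufsp_readl()/ufsp_writel() accesses, or the HCI_SW_RST write ...
 *      exynos_ufs_auto_ctrl_hcc_restore(ufs, &saved_misc);
 *
 * exynos_ufs_config_smu() and exynos_ufs_host_reset() below follow this
 * pattern; "saved_misc" is only an illustrative variable name.
 */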
static void exynos_ufs_ctrl_clkstop(struct exynos_ufs *ufs, bool en)
        u32 ctrl = hci_readl(ufs, HCI_CLKSTOP_CTRL);
        u32 misc = hci_readl(ufs, HCI_MISC);

                hci_writel(ufs, misc | CLK_CTRL_EN_MASK, HCI_MISC);
                hci_writel(ufs, ctrl | CLK_STOP_MASK, HCI_CLKSTOP_CTRL);

                hci_writel(ufs, ctrl & ~CLK_STOP_MASK, HCI_CLKSTOP_CTRL);
                hci_writel(ufs, misc & ~CLK_CTRL_EN_MASK, HCI_MISC);

static int exynos_ufs_get_clk_info(struct exynos_ufs *ufs)
        struct ufs_hba *hba = ufs->hba;
        struct list_head *head = &hba->clk_list_head;

                if (!IS_ERR(clki->clk)) {
                        if (!strcmp(clki->name, "core_clk"))
                                ufs->clk_hci_core = clki->clk;
                        else if (!strcmp(clki->name, "sclk_unipro_main"))
                                ufs->clk_unipro_main = clki->clk;

        if (!ufs->clk_hci_core || !ufs->clk_unipro_main) {
                dev_err(hba->dev, "failed to get clk info\n");
                ret = -EINVAL;

        ufs->mclk_rate = clk_get_rate(ufs->clk_unipro_main);
        pclk_rate = clk_get_rate(ufs->clk_hci_core);
        f_min = ufs->pclk_avail_min;
        f_max = ufs->pclk_avail_max;

        if (ufs->opts & EXYNOS_UFS_OPT_HAS_APB_CLK_CTRL) {

                dev_err(hba->dev, "not available pclk range %lu\n", pclk_rate);
                ret = -EINVAL;

        ufs->pclk_rate = pclk_rate;
        ufs->pclk_div = div;
static void exynos_ufs_set_unipro_pclk_div(struct exynos_ufs *ufs)
        if (ufs->opts & EXYNOS_UFS_OPT_HAS_APB_CLK_CTRL) {

                val = hci_readl(ufs, HCI_UNIPRO_APB_CLK_CTRL);
                hci_writel(ufs, UNIPRO_APB_CLK(val, ufs->pclk_div),

static void exynos_ufs_set_pwm_clk_div(struct exynos_ufs *ufs)
        struct ufs_hba *hba = ufs->hba;
        struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;

                       UIC_ARG_MIB(CMN_PWM_CLK_CTRL), attr->cmn_pwm_clk_ctrl);

static void exynos_ufs_calc_pwm_clk_div(struct exynos_ufs *ufs)
        struct ufs_hba *hba = ufs->hba;
        struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;
        int i = 0, clk_idx = -1;

        clk_period = UNIPRO_PCLK_PERIOD(ufs);

        if (clk_idx == -1) {
                dev_err(hba->dev,

        attr->cmn_pwm_clk_ctrl = clk_idx & PWM_CLK_CTRL_MASK;

long exynos_ufs_calc_time_cntr(struct exynos_ufs *ufs, long period)
        long pclk_rate = ufs->pclk_rate;

        clk_period = UNIPRO_PCLK_PERIOD(ufs);
static void exynos_ufs_specify_phy_time_attr(struct exynos_ufs *ufs)
        struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;
        struct ufs_phy_time_cfg *t_cfg = &ufs->t_cfg;

        t_cfg->tx_linereset_p =
                exynos_ufs_calc_time_cntr(ufs, attr->tx_dif_p_nsec);
        t_cfg->tx_linereset_n =
                exynos_ufs_calc_time_cntr(ufs, attr->tx_dif_n_nsec);
        t_cfg->tx_high_z_cnt =
                exynos_ufs_calc_time_cntr(ufs, attr->tx_high_z_cnt_nsec);
        t_cfg->tx_base_n_val =
                exynos_ufs_calc_time_cntr(ufs, attr->tx_base_unit_nsec);
        t_cfg->tx_gran_n_val =
                exynos_ufs_calc_time_cntr(ufs, attr->tx_gran_unit_nsec);
        t_cfg->tx_sleep_cnt =
                exynos_ufs_calc_time_cntr(ufs, attr->tx_sleep_cnt);

        t_cfg->rx_linereset =
                exynos_ufs_calc_time_cntr(ufs, attr->rx_dif_p_nsec);
        t_cfg->rx_hibern8_wait =
                exynos_ufs_calc_time_cntr(ufs, attr->rx_hibern8_wait_nsec);
        t_cfg->rx_base_n_val =
                exynos_ufs_calc_time_cntr(ufs, attr->rx_base_unit_nsec);
        t_cfg->rx_gran_n_val =
                exynos_ufs_calc_time_cntr(ufs, attr->rx_gran_unit_nsec);
        t_cfg->rx_sleep_cnt =
                exynos_ufs_calc_time_cntr(ufs, attr->rx_sleep_cnt);
        t_cfg->rx_stall_cnt =
                exynos_ufs_calc_time_cntr(ufs, attr->rx_stall_cnt);
static void exynos_ufs_config_phy_time_attr(struct exynos_ufs *ufs)
        struct ufs_hba *hba = ufs->hba;
        struct ufs_phy_time_cfg *t_cfg = &ufs->t_cfg;

        exynos_ufs_set_pwm_clk_div(ufs);

        for_each_ufs_rx_lane(ufs, i) {
                               ufs->drv_data->uic_attr->rx_filler_enable);
                               RX_LINERESET(t_cfg->rx_linereset));
                               RX_BASE_NVAL_L(t_cfg->rx_base_n_val));
                               RX_BASE_NVAL_H(t_cfg->rx_base_n_val));
                               RX_GRAN_NVAL_L(t_cfg->rx_gran_n_val));
                               RX_GRAN_NVAL_H(t_cfg->rx_gran_n_val));
                               RX_OV_SLEEP_CNT(t_cfg->rx_sleep_cnt));
                               RX_OV_STALL_CNT(t_cfg->rx_stall_cnt));

        for_each_ufs_tx_lane(ufs, i) {
                               TX_LINERESET_P(t_cfg->tx_linereset_p));
                               TX_HIGH_Z_CNT_L(t_cfg->tx_high_z_cnt));
                               TX_HIGH_Z_CNT_H(t_cfg->tx_high_z_cnt));
                               TX_BASE_NVAL_L(t_cfg->tx_base_n_val));
                               TX_BASE_NVAL_H(t_cfg->tx_base_n_val));
                               TX_GRAN_NVAL_L(t_cfg->tx_gran_n_val));
                               TX_GRAN_NVAL_H(t_cfg->tx_gran_n_val));
                               TX_OV_SLEEP_CNT(t_cfg->tx_sleep_cnt));
                               ufs->drv_data->uic_attr->tx_min_activatetime);
static void exynos_ufs_config_phy_cap_attr(struct exynos_ufs *ufs)
        struct ufs_hba *hba = ufs->hba;
        struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;

        for_each_ufs_rx_lane(ufs, i) {
                               attr->rx_hs_g1_sync_len_cap);
                               attr->rx_hs_g2_sync_len_cap);
                               attr->rx_hs_g3_sync_len_cap);
                               attr->rx_hs_g1_prep_sync_len_cap);
                               attr->rx_hs_g2_prep_sync_len_cap);
                               attr->rx_hs_g3_prep_sync_len_cap);

        if (attr->rx_adv_fine_gran_sup_en == 0) {
                for_each_ufs_rx_lane(ufs, i) {

                        if (attr->rx_min_actv_time_cap)
                                        attr->rx_min_actv_time_cap);

                        if (attr->rx_hibern8_time_cap)
                                        attr->rx_hibern8_time_cap);
        } else if (attr->rx_adv_fine_gran_sup_en == 1) {
                for_each_ufs_rx_lane(ufs, i) {
                        if (attr->rx_adv_fine_gran_step)
                                                attr->rx_adv_fine_gran_step));

                        if (attr->rx_adv_min_actv_time_cap)
                                        attr->rx_adv_min_actv_time_cap);

                        if (attr->rx_adv_hibern8_time_cap)
                                        attr->rx_adv_hibern8_time_cap);
static void exynos_ufs_establish_connt(struct exynos_ufs *ufs)
        struct ufs_hba *hba = ufs->hba;

static void exynos_ufs_config_smu(struct exynos_ufs *ufs)

        exynos_ufs_disable_auto_ctrl_hcc_save(ufs, &val);

        reg = ufsp_readl(ufs, UFSPRSECURITY);
        ufsp_writel(ufs, reg | NSSMU, UFSPRSECURITY);
        ufsp_writel(ufs, 0x0, UFSPSBEGIN0);
        ufsp_writel(ufs, 0xffffffff, UFSPSEND0);
        ufsp_writel(ufs, 0xff, UFSPSLUN0);
        ufsp_writel(ufs, 0xf1, UFSPSCTRL0);

        exynos_ufs_auto_ctrl_hcc_restore(ufs, &val);

static void exynos_ufs_config_sync_pattern_mask(struct exynos_ufs *ufs,
        struct ufs_hba *hba = ufs->hba;
        u8 g = max_t(u32, pwr->gear_rx, pwr->gear_tx);

        mask = exynos_ufs_calc_time_cntr(ufs, sync_len);

        for_each_ufs_rx_lane(ufs, i)
/* in exynos_ufs_pre_pwr_mode(): */
        struct exynos_ufs *ufs = ufshcd_get_variant(hba);
        struct phy *generic_phy = ufs->phy;

                ret = -EINVAL;

        if (ufs->drv_data->pre_pwr_change)
                ufs->drv_data->pre_pwr_change(ufs, dev_req_params);

        exynos_ufs_config_sync_pattern_mask(ufs, dev_req_params);

        switch (dev_req_params->hs_rate) {

/* in exynos_ufs_post_pwr_mode(): */
        struct exynos_ufs *ufs = ufshcd_get_variant(hba);
        struct phy *generic_phy = ufs->phy;
        int gear = max_t(u32, pwr_req->gear_rx, pwr_req->gear_tx);
        int lanes = max_t(u32, pwr_req->lane_rx, pwr_req->lane_tx);

        if (ufs->drv_data->post_pwr_change)
                ufs->drv_data->post_pwr_change(ufs, pwr_req);

        switch (pwr_req->hs_rate) {
                        "FAST", pwr_req->hs_rate == PA_HS_MODE_A ? "A" : "B",

        dev_info(hba->dev, "Power mode changed to : %s\n", pwr_str);

/* in exynos_ufs_specify_nexus_t_xfer_req(): */
        struct exynos_ufs *ufs = ufshcd_get_variant(hba);

        type = hci_readl(ufs, HCI_UTRL_NEXUS_TYPE);
                hci_writel(ufs, type | (1 << tag), HCI_UTRL_NEXUS_TYPE);
                hci_writel(ufs, type & ~(1 << tag), HCI_UTRL_NEXUS_TYPE);

/* in exynos_ufs_specify_nexus_t_tm_req(): */
        struct exynos_ufs *ufs = ufshcd_get_variant(hba);

        type = hci_readl(ufs, HCI_UTMRL_NEXUS_TYPE);
                hci_writel(ufs, type | (1 << tag), HCI_UTMRL_NEXUS_TYPE);
                hci_writel(ufs, type & ~(1 << tag), HCI_UTMRL_NEXUS_TYPE);
static int exynos_ufs_phy_init(struct exynos_ufs *ufs)
        struct ufs_hba *hba = ufs->hba;
        struct phy *generic_phy = ufs->phy;

        if (ufs->avail_ln_rx == 0 || ufs->avail_ln_tx == 0) {
                                &ufs->avail_ln_rx);
                                &ufs->avail_ln_tx);
                WARN(ufs->avail_ln_rx != ufs->avail_ln_tx,
                     ufs->avail_ln_rx, ufs->avail_ln_tx);

        phy_set_bus_width(generic_phy, ufs->avail_ln_rx);

                dev_err(hba->dev, "%s: phy init failed, ret = %d\n",

static void exynos_ufs_config_unipro(struct exynos_ufs *ufs)
        struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;
        struct ufs_hba *hba = ufs->hba;

        if (attr->pa_dbg_clk_period_off)
                ufshcd_dme_set(hba, UIC_ARG_MIB(attr->pa_dbg_clk_period_off),
                               DIV_ROUND_UP(NSEC_PER_SEC, ufs->mclk_rate));

                       ufs->drv_data->uic_attr->tx_trailingclks);

        if (attr->pa_dbg_opt_suite1_off)
                ufshcd_dme_set(hba, UIC_ARG_MIB(attr->pa_dbg_opt_suite1_off),
                               attr->pa_dbg_opt_suite1_val);

        if (attr->pa_dbg_opt_suite2_off)
                ufshcd_dme_set(hba, UIC_ARG_MIB(attr->pa_dbg_opt_suite2_off),
                               attr->pa_dbg_opt_suite2_val);

static void exynos_ufs_config_intr(struct exynos_ufs *ufs, u32 errs, u8 index)
                hci_writel(ufs, DFES_ERR_EN | errs, HCI_ERR_EN_PA_LAYER);
                hci_writel(ufs, DFES_ERR_EN | errs, HCI_ERR_EN_DL_LAYER);
                hci_writel(ufs, DFES_ERR_EN | errs, HCI_ERR_EN_N_LAYER);
                hci_writel(ufs, DFES_ERR_EN | errs, HCI_ERR_EN_T_LAYER);
                hci_writel(ufs, DFES_ERR_EN | errs, HCI_ERR_EN_DME_LAYER);
/* in exynos_ufs_setup_clocks(): */
        struct exynos_ufs *ufs = ufshcd_get_variant(hba);

        if (!ufs)

                if (ufs->opts & EXYNOS_UFS_OPT_BROKEN_AUTO_CLK_CTRL)
                        exynos_ufs_disable_auto_ctrl_hcc(ufs);
                exynos_ufs_ungate_clks(ufs);

                exynos_ufs_gate_clks(ufs);
                if (ufs->opts & EXYNOS_UFS_OPT_BROKEN_AUTO_CLK_CTRL)
                        exynos_ufs_enable_auto_ctrl_hcc(ufs);

/* in exynos_ufs_pre_link(): */
        struct exynos_ufs *ufs = ufshcd_get_variant(hba);

        exynos_ufs_config_intr(ufs, DFES_DEF_L2_ERRS, UNIPRO_L2);
        exynos_ufs_config_intr(ufs, DFES_DEF_L3_ERRS, UNIPRO_L3);
        exynos_ufs_config_intr(ufs, DFES_DEF_L4_ERRS, UNIPRO_L4);
        exynos_ufs_set_unipro_pclk_div(ufs);

        exynos_ufs_config_unipro(ufs);

        /* m-phy */
        exynos_ufs_phy_init(ufs);
        if (!(ufs->opts & EXYNOS_UFS_OPT_SKIP_CONFIG_PHY_ATTR)) {
                exynos_ufs_config_phy_time_attr(ufs);
                exynos_ufs_config_phy_cap_attr(ufs);

        if (ufs->drv_data->pre_link)
                ufs->drv_data->pre_link(ufs);

static void exynos_ufs_fit_aggr_timeout(struct exynos_ufs *ufs)

        if (ufs->opts & EXYNOS_UFS_OPT_TIMER_TICK_SELECT) {
                val = hci_readl(ufs, HCI_V2P1_CTRL);
                hci_writel(ufs, val, HCI_V2P1_CTRL);

        val = exynos_ufs_calc_time_cntr(ufs, IATOVAL_NSEC / CNTR_DIV_VAL);
        hci_writel(ufs, val & CNT_VAL_1US_MASK, HCI_1US_TO_CNT_VAL);
/* in exynos_ufs_post_link(): */
        struct exynos_ufs *ufs = ufshcd_get_variant(hba);
        struct phy *generic_phy = ufs->phy;
        struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;

        exynos_ufs_establish_connt(ufs);
        exynos_ufs_fit_aggr_timeout(ufs);

        hci_writel(ufs, 0xa, HCI_DATA_REORDER);
        hci_writel(ufs, ilog2(DATA_UNIT_SIZE), HCI_TXPRDT_ENTRY_SIZE);
        hci_writel(ufs, ilog2(DATA_UNIT_SIZE), HCI_RXPRDT_ENTRY_SIZE);
        hci_writel(ufs, (1 << hba->nutrs) - 1, HCI_UTRL_NEXUS_TYPE);
        hci_writel(ufs, (1 << hba->nutmrs) - 1, HCI_UTMRL_NEXUS_TYPE);
        hci_writel(ufs, 0xf, HCI_AXIDMA_RWDATA_BURST_LEN);

        if (ufs->opts & EXYNOS_UFS_OPT_SKIP_CONNECTION_ESTAB)

        if (attr->pa_granularity) {
                               attr->pa_granularity);

                if (attr->pa_tactivate)
                                       attr->pa_tactivate);
                if (attr->pa_hibern8time &&
                    !(ufs->opts & EXYNOS_UFS_OPT_USE_SW_HIBERN8_TIMER))
                                       attr->pa_hibern8time);

        if (ufs->opts & EXYNOS_UFS_OPT_USE_SW_HIBERN8_TIMER) {
                if (!attr->pa_granularity)
                                        &attr->pa_granularity);
                if (!attr->pa_hibern8time)
                                        &attr->pa_hibern8time);

                if (attr->pa_granularity < 1 || attr->pa_granularity > 6) {
                        dev_warn(hba->dev,
                                 attr->pa_granularity);
                        attr->pa_granularity = 6;

        if (ufs->drv_data->post_link)
                ufs->drv_data->post_link(ufs);
static int exynos_ufs_parse_dt(struct device *dev, struct exynos_ufs *ufs)
        struct device_node *np = dev->of_node;

        ufs->drv_data = device_get_match_data(dev);

        if (ufs->drv_data && ufs->drv_data->uic_attr) {
                attr = ufs->drv_data->uic_attr;
                ret = -EINVAL;

        ufs->sysreg = syscon_regmap_lookup_by_phandle(np, "samsung,sysreg");
        if (IS_ERR(ufs->sysreg))
                ufs->sysreg = NULL;

                                 &ufs->shareability_reg_offset)) {
                ufs->shareability_reg_offset = UFS_SHAREABILITY_OFFSET;

        ufs->pclk_avail_min = PCLK_AVAIL_MIN;
        ufs->pclk_avail_max = PCLK_AVAIL_MAX;

        attr->rx_adv_fine_gran_sup_en = RX_ADV_FINE_GRAN_SUP_EN;
        attr->rx_adv_fine_gran_step = RX_ADV_FINE_GRAN_STEP_VAL;
        attr->rx_adv_min_actv_time_cap = RX_ADV_MIN_ACTV_TIME_CAP;
        attr->pa_granularity = PA_GRANULARITY_VAL;
        attr->pa_tactivate = PA_TACTIVATE_VAL;
        attr->pa_hibern8time = PA_HIBERN8TIME_VAL;
/* in exynos_ufs_priv_init(..., struct exynos_ufs *ufs): */
        ufs->hba = hba;
        ufs->opts = ufs->drv_data->opts;
        ufs->rx_sel_idx = PA_MAXDATALANES;
        if (ufs->opts & EXYNOS_UFS_OPT_BROKEN_RX_SEL_IDX)
                ufs->rx_sel_idx = 0;
        hba->priv = (void *)ufs;
        hba->quirks = ufs->drv_data->quirks;
 * hardware on Exynos and Exynos-based SoCs. The interface to this hardware is
 * not compatible with the standard UFS crypto. It requires that encryption be

/*
 * struct fmp_sg_entry - nonstandard format of PRDT entries when FMP is enabled
 *      bits of the 'size' field, i.e. the last 32-bit word. When these
 * @file_enckey: The first half of the AES-XTS key with all bytes reversed
 * @file_twkey: The second half of the AES-XTS key with all bytes reversed
 */
static void exynos_ufs_fmp_init(struct ufs_hba *hba, struct exynos_ufs *ufs)
        struct blk_crypto_profile *profile = &hba->crypto_profile;

        /*
         * downstream driver source for gs101 and other Exynos-based SoCs. It
         * on other Exynos-based SoCs too, and might even still be the only way
         */
        if (!(ufs->opts & EXYNOS_UFS_OPT_UFSPR_SECURE))

                dev_warn(hba->dev,

                dev_err(hba->dev,

        err = devm_blk_crypto_profile_init(hba->dev, profile, 0);
                dev_err(hba->dev, "Failed to initialize crypto profile: %d\n",

        profile->max_dun_bytes_supported = AES_BLOCK_SIZE;
        profile->dev = hba->dev;
        profile->modes_supported[BLK_ENCRYPTION_MODE_AES_256_XTS] =

        /* Advertise crypto support to ufshcd-core. */
        hba->caps |= UFSHCD_CAP_CRYPTO;

        /* Advertise crypto quirks to ufshcd-core. */
        hba->quirks |= UFSHCD_QUIRK_CUSTOM_CRYPTO_PROFILE |
/* in exynos_ufs_fmp_resume(): */
        if (!(hba->caps & UFSHCD_CAP_CRYPTO))

                dev_err(hba->dev,
                dev_err(hba->dev,

/* in fmp_key_word(): */
                        key + AES_KEYSIZE_256 - (j + 1) * sizeof(u64)));

/* in exynos_ufs_fmp_fill_prdt(): */
        const u8 *enckey = crypt_ctx->bc_key->raw;
        u64 dun_lo = crypt_ctx->bc_dun[0];
        u64 dun_hi = crypt_ctx->bc_dun[1];

        if (WARN_ON_ONCE(!(hba->caps & UFSHCD_CAP_CRYPTO)))
                return -EIO;

                if (prd->base.size != cpu_to_le32(DATA_UNIT_SIZE - 1)) {
                        dev_err(hba->dev,
                        return -EIO;

                prd->base.size |= cpu_to_le32((FMP_ALGO_MODE_AES_XTS << 28) |

                prd->file_iv[0] = cpu_to_be64(dun_hi);
                prd->file_iv[1] = cpu_to_be64(dun_lo);

                        prd->file_enckey[j] = fmp_key_word(enckey, j);
                        prd->file_twkey[j] = fmp_key_word(twkey, j);
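        /*
         * Key layout assumed above (standard for blk-crypto AES-256-XTS raw
         * keys): the 64-byte key is the 32-byte encryption key followed by
         * the 32-byte tweak key, so enckey points at the first half and
         * twkey (set up in lines not shown in this excerpt) presumably at the
         * second. fmp_key_word() indexes 64-bit words back from the end of a
         * 32-byte half (AES_KEYSIZE_256 - (j + 1) * 8), consistent with the
         * reversed layout described in the struct fmp_sg_entry comment.
         */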
static void exynos_ufs_fmp_init(struct ufs_hba *hba, struct exynos_ufs *ufs)

/* in exynos_ufs_init(): */
        struct device *dev = hba->dev;
        struct exynos_ufs *ufs;

        ufs = devm_kzalloc(dev, sizeof(*ufs), GFP_KERNEL);
        if (!ufs)
                return -ENOMEM;

        /* exynos-specific hci */
        ufs->reg_hci = devm_platform_ioremap_resource_byname(pdev, "vs_hci");
        if (IS_ERR(ufs->reg_hci)) {
                return PTR_ERR(ufs->reg_hci);

        ufs->reg_unipro = devm_platform_ioremap_resource_byname(pdev, "unipro");
        if (IS_ERR(ufs->reg_unipro)) {
                return PTR_ERR(ufs->reg_unipro);

        /* ufs protector */
        ufs->reg_ufsp = devm_platform_ioremap_resource_byname(pdev, "ufsp");
        if (IS_ERR(ufs->reg_ufsp)) {
                dev_err(dev, "cannot ioremap for ufs protector register\n");
                return PTR_ERR(ufs->reg_ufsp);

        ret = exynos_ufs_parse_dt(dev, ufs);

        ufs->phy = devm_phy_get(dev, "ufs-phy");
        if (IS_ERR(ufs->phy)) {
                ret = PTR_ERR(ufs->phy);
                dev_err(dev, "failed to get ufs-phy\n");

        exynos_ufs_priv_init(hba, ufs);

        exynos_ufs_fmp_init(hba, ufs);

        if (ufs->drv_data->drv_init) {
                ret = ufs->drv_data->drv_init(dev, ufs);
                        dev_err(dev, "failed to init drv-data\n");

        ret = exynos_ufs_get_clk_info(ufs);

        exynos_ufs_specify_phy_time_attr(ufs);
        if (!(ufs->opts & EXYNOS_UFS_OPT_UFSPR_SECURE))
                exynos_ufs_config_smu(ufs);

        hba->host->dma_alignment = DATA_UNIT_SIZE - 1;

        hba->priv = NULL;
/* in exynos_ufs_host_reset(): */
        struct exynos_ufs *ufs = ufshcd_get_variant(hba);

        exynos_ufs_disable_auto_ctrl_hcc_save(ufs, &val);

        hci_writel(ufs, UFS_SW_RST_MASK, HCI_SW_RST);

                if (!(hci_readl(ufs, HCI_SW_RST) & UFS_SW_RST_MASK))

        dev_err(hba->dev, "timeout host sw-reset\n");
        ret = -ETIMEDOUT;

        exynos_ufs_auto_ctrl_hcc_restore(ufs, &val);

/* in exynos_ufs_dev_hw_reset(): */
        struct exynos_ufs *ufs = ufshcd_get_variant(hba);

        hci_writel(ufs, 0 << 0, HCI_GPIO_OUT);
        hci_writel(ufs, 1 << 0, HCI_GPIO_OUT);

/* in exynos_ufs_pre_hibern8(): */
        struct exynos_ufs *ufs = ufshcd_get_variant(hba);
        struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;

                if (ufs->opts & EXYNOS_UFS_OPT_BROKEN_AUTO_CLK_CTRL)
                        exynos_ufs_disable_auto_ctrl_hcc(ufs);
                exynos_ufs_ungate_clks(ufs);

                if (ufs->opts & EXYNOS_UFS_OPT_USE_SW_HIBERN8_TIMER) {
                        int h8_time = attr->pa_hibern8time *
                                      granularity_tbl[attr->pa_granularity - 1];
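                        /*
                         * granularity_tbl itself is not part of this excerpt;
                         * the assumption here is that it maps PA_Granularity
                         * values 1..6 to the UniPro step sizes of 1, 4, 8, 16,
                         * 32 and 100 us, which would make h8_time above the
                         * hibern8 time expressed in microseconds.
                         */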
                        delta = h8_time - ktime_us_delta(ktime_get(),
                                                         ufs->entry_hibern8_t);
/* in exynos_ufs_post_hibern8(): */
        struct exynos_ufs *ufs = ufshcd_get_variant(hba);

        if (ufshcd_is_hs_mode(&ufs->dev_req_params))

                dev_warn(hba->dev, "%s: power mode change\n", __func__);
                hba->pwr_info.pwr_rx = (cur_mode >> 4) & 0xf;
                hba->pwr_info.pwr_tx = cur_mode & 0xf;
                ufshcd_config_pwr_mode(hba, &hba->max_pwr_info.info);

        if (!(ufs->opts & EXYNOS_UFS_OPT_SKIP_CONNECTION_ESTAB))
                exynos_ufs_establish_connt(ufs);

        ufs->entry_hibern8_t = ktime_get();
        exynos_ufs_gate_clks(ufs);
        if (ufs->opts & EXYNOS_UFS_OPT_BROKEN_AUTO_CLK_CTRL)
                exynos_ufs_enable_auto_ctrl_hcc(ufs);

/* in exynos_ufs_hce_enable_notify(): */
        struct exynos_ufs *ufs = ufshcd_get_variant(hba);

                hba->host->max_segment_size = DATA_UNIT_SIZE;

                if (ufs->drv_data->pre_hce_enable) {
                        ret = ufs->drv_data->pre_hce_enable(ufs);

                exynos_ufs_calc_pwm_clk_div(ufs);
                if (!(ufs->opts & EXYNOS_UFS_OPT_BROKEN_AUTO_CLK_CTRL))
                        exynos_ufs_enable_auto_ctrl_hcc(ufs);

                if (ufs->drv_data->post_hce_enable)
                        ret = ufs->drv_data->post_hce_enable(ufs);

/* in exynos_ufs_suspend(): */
        struct exynos_ufs *ufs = ufshcd_get_variant(hba);

        phy_power_off(ufs->phy);

/* in exynos_ufs_resume(): */
        struct exynos_ufs *ufs = ufshcd_get_variant(hba);

        phy_power_on(ufs->phy);

        exynos_ufs_config_smu(ufs);
/* in exynosauto_ufs_vh_wait_ph_ready(): */
        return -ETIME;

/* in exynosauto_ufs_vh_init(): */
        struct device *dev = hba->dev;
        struct exynos_ufs *ufs;

        ufs = devm_kzalloc(dev, sizeof(*ufs), GFP_KERNEL);
        if (!ufs)
                return -ENOMEM;

        /* exynos-specific hci */
        ufs->reg_hci = devm_platform_ioremap_resource_byname(pdev, "vs_hci");
        if (IS_ERR(ufs->reg_hci)) {
                return PTR_ERR(ufs->reg_hci);

        ufs->drv_data = device_get_match_data(dev);
        if (!ufs->drv_data)
                return -ENODEV;

        exynos_ufs_priv_init(hba, ufs);
static int fsd_ufs_pre_link(struct exynos_ufs *ufs)
        struct exynos_ufs_uic_attr *attr = ufs->drv_data->uic_attr;
        struct ufs_hba *hba = ufs->hba;

        ufshcd_dme_set(hba, UIC_ARG_MIB(attr->pa_dbg_clk_period_off),
                       DIV_ROUND_UP(NSEC_PER_SEC, ufs->mclk_rate));

        for_each_ufs_tx_lane(ufs, i) {
                               DIV_ROUND_UP(NSEC_PER_SEC, ufs->mclk_rate));

        for_each_ufs_rx_lane(ufs, i) {
                               DIV_ROUND_UP(NSEC_PER_SEC, ufs->mclk_rate));

        ufshcd_dme_set(hba, UIC_ARG_MIB(attr->pa_dbg_opt_suite1_off),

        exynos_ufs_establish_connt(ufs);

static int fsd_ufs_post_link(struct exynos_ufs *ufs)
        struct ufs_hba *hba = ufs->hba;

        for_each_ufs_rx_lane(ufs, i) {

static int fsd_ufs_pre_pwr_change(struct exynos_ufs *ufs,
        struct ufs_hba *hba = ufs->hba;

        unipro_writel(ufs, 12000, UNIPRO_DME_POWERMODE_REQ_REMOTEL2TIMER0);
        unipro_writel(ufs, 32000, UNIPRO_DME_POWERMODE_REQ_REMOTEL2TIMER1);
        unipro_writel(ufs, 16000, UNIPRO_DME_POWERMODE_REQ_REMOTEL2TIMER2);
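/*
 * get_mclk_period_unipro_18() below returns the UniPro main clock period
 * scaled by 16: 16 * 1000 * 1000000 / mclk_rate = 16 * (period in ns).
 * For example, with a hypothetical 200 MHz mclk (a rate chosen only for
 * illustration), 16,000,000,000 / 200,000,000 = 80, i.e. sixteen times the
 * 5 ns clock period. gs101_ufs_pre_link() writes this value to
 * COMP_CLK_PERIOD.
 */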
static inline u32 get_mclk_period_unipro_18(struct exynos_ufs *ufs)
        return (16 * 1000 * 1000000UL / ufs->mclk_rate);

static int gs101_ufs_pre_link(struct exynos_ufs *ufs)
        struct ufs_hba *hba = ufs->hba;

        rx_line_reset_period = (RX_LINE_RESET_TIME * ufs->mclk_rate)
        tx_line_reset_period = (TX_LINE_RESET_TIME * ufs->mclk_rate)

        unipro_writel(ufs, get_mclk_period_unipro_18(ufs), COMP_CLK_PERIOD);

        for_each_ufs_rx_lane(ufs, i) {
                               DIV_ROUND_UP(NSEC_PER_SEC, ufs->mclk_rate));

        for_each_ufs_tx_lane(ufs, i) {
                               DIV_ROUND_UP(NSEC_PER_SEC, ufs->mclk_rate));

static int gs101_ufs_post_link(struct exynos_ufs *ufs)
        struct ufs_hba *hba = ufs->hba;

static int gs101_ufs_pre_pwr_change(struct exynos_ufs *ufs,
        struct ufs_hba *hba = ufs->hba;

        unipro_writel(ufs, 8064, UNIPRO_DME_POWERMODE_REQ_LOCALL2TIMER0);
        unipro_writel(ufs, 28224, UNIPRO_DME_POWERMODE_REQ_LOCALL2TIMER1);
        unipro_writel(ufs, 20160, UNIPRO_DME_POWERMODE_REQ_LOCALL2TIMER2);
        unipro_writel(ufs, 12000, UNIPRO_DME_POWERMODE_REQ_REMOTEL2TIMER0);
        unipro_writel(ufs, 32000, UNIPRO_DME_POWERMODE_REQ_REMOTEL2TIMER1);
        unipro_writel(ufs, 16000, UNIPRO_DME_POWERMODE_REQ_REMOTEL2TIMER2);
/* in exynos_ufs_probe(): */
        struct device *dev = &pdev->dev;

        if (drv_data && drv_data->vops)
                vops = drv_data->vops;

/* in exynos_ufs_remove(): */
        struct exynos_ufs *ufs = ufshcd_get_variant(hba);

        pm_runtime_get_sync(&(pdev)->dev);

        phy_power_off(ufs->phy);
        phy_exit(ufs->phy);
        { .compatible = "google,gs101-ufs",
        { .compatible = "samsung,exynos7-ufs",
        { .compatible = "samsung,exynosautov9-ufs",
        { .compatible = "samsung,exynosautov9-ufs-vh",
        { .compatible = "tesla,fsd-ufs",

                .name = "exynos-ufshc",

MODULE_DESCRIPTION("Exynos UFS HCI Driver");