
Searched refs:rdev (Results 1 – 25 of 461) sorted by relevance


/linux/drivers/gpu/drm/radeon/
r420.c
45 void r420_pm_init_profile(struct radeon_device *rdev) in r420_pm_init_profile() argument
48 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index; in r420_pm_init_profile()
49 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index; in r420_pm_init_profile()
50 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0; in r420_pm_init_profile()
51 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0; in r420_pm_init_profile()
53 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = 0; in r420_pm_init_profile()
54 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = 0; in r420_pm_init_profile()
55 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0; in r420_pm_init_profile()
56 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0; in r420_pm_init_profile()
58 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = 0; in r420_pm_init_profile()
[all …]
r520.c
36 int r520_mc_wait_for_idle(struct radeon_device *rdev) in r520_mc_wait_for_idle() argument
41 for (i = 0; i < rdev->usec_timeout; i++) { in r520_mc_wait_for_idle()
52 static void r520_gpu_init(struct radeon_device *rdev) in r520_gpu_init() argument
56 rv515_vga_render_disable(rdev); in r520_gpu_init()
78 if (rdev->family == CHIP_RV530) { in r520_gpu_init()
81 r420_pipes_init(rdev); in r520_gpu_init()
88 if (r520_mc_wait_for_idle(rdev)) { in r520_gpu_init()
93 static void r520_vram_get_type(struct radeon_device *rdev) in r520_vram_get_type() argument
97 rdev->mc.vram_width = 128; in r520_vram_get_type()
98 rdev->mc.vram_is_ddr = true; in r520_vram_get_type()
[all …]
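
The r520.c hit above (and rs690.c further down) shows radeon's wait-for-idle idiom: poll a hardware status condition for at most rdev->usec_timeout iterations and give up with a timeout if it never becomes true. A minimal user-space sketch of that bounded-polling pattern follows; mc_is_idle(), the timeout constant, and the one-microsecond sleep are stand-ins, not taken from the driver.

#include <errno.h>
#include <stdio.h>
#include <unistd.h>

#define USEC_TIMEOUT 100000  /* illustrative bound; the driver reads rdev->usec_timeout */

/* Stand-in for reading the memory-controller idle bit. */
static int mc_is_idle(void)
{
    static int countdown = 5;  /* pretend the MC goes idle after a few polls */
    return --countdown <= 0;
}

/* Poll the condition until it holds or the bound is exhausted. */
static int mc_wait_for_idle(void)
{
    unsigned int i;

    for (i = 0; i < USEC_TIMEOUT; i++) {
        if (mc_is_idle())
            return 0;          /* idle: success */
        usleep(1);             /* short delay between polls */
    }
    return -ETIMEDOUT;         /* callers typically warn and carry on */
}

int main(void)
{
    printf("mc_wait_for_idle() -> %d\n", mc_wait_for_idle());
    return 0;
}
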
ni.c
47 u32 tn_smc_rreg(struct radeon_device *rdev, u32 reg) in tn_smc_rreg() argument
52 spin_lock_irqsave(&rdev->smc_idx_lock, flags); in tn_smc_rreg()
55 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); in tn_smc_rreg()
59 void tn_smc_wreg(struct radeon_device *rdev, u32 reg, u32 v) in tn_smc_wreg() argument
63 spin_lock_irqsave(&rdev->smc_idx_lock, flags); in tn_smc_wreg()
66 spin_unlock_irqrestore(&rdev->smc_idx_lock, flags); in tn_smc_wreg()
437 static void ni_init_golden_registers(struct radeon_device *rdev) in ni_init_golden_registers() argument
439 switch (rdev->family) { in ni_init_golden_registers()
441 radeon_program_register_sequence(rdev, in ni_init_golden_registers()
444 radeon_program_register_sequence(rdev, in ni_init_golden_registers()
[all …]
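
The ni.c lines (and the eg_cg_rreg()/eg_pif_phy0_rreg() hits in evergreen.c below) show radeon's indexed-register helpers: take a spinlock, program the index register, access the data register, then drop the lock so concurrent callers cannot interleave the index/data pair. A rough user-space sketch of the same locking pattern, with a pthread mutex in place of spin_lock_irqsave() and an in-memory array standing in for the hardware; the register model is invented for illustration.

#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

/* Simulated hardware: the index register selects which backing word
 * the data register accesses. */
static uint32_t backing[16];
static uint32_t index_reg;
static pthread_mutex_t idx_lock = PTHREAD_MUTEX_INITIALIZER;

static uint32_t smc_rreg(uint32_t reg)
{
    uint32_t v;

    pthread_mutex_lock(&idx_lock);   /* driver: spin_lock_irqsave(&rdev->smc_idx_lock, flags) */
    index_reg = reg;                 /* program the index register */
    v = backing[index_reg & 0xf];    /* read the data register */
    pthread_mutex_unlock(&idx_lock);
    return v;
}

static void smc_wreg(uint32_t reg, uint32_t v)
{
    pthread_mutex_lock(&idx_lock);
    index_reg = reg;
    backing[index_reg & 0xf] = v;    /* write the data register */
    pthread_mutex_unlock(&idx_lock);
}

int main(void)
{
    smc_wreg(3, 0xdeadbeef);
    printf("reg 3 = 0x%08x\n", smc_rreg(3));
    return 0;
}
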
rs400.c
41 static void rs400_debugfs_pcie_gart_info_init(struct radeon_device *rdev);
43 void rs400_gart_adjust_size(struct radeon_device *rdev) in rs400_gart_adjust_size() argument
46 switch (rdev->mc.gtt_size/(1024*1024)) { in rs400_gart_adjust_size()
57 (unsigned)(rdev->mc.gtt_size >> 20)); in rs400_gart_adjust_size()
60 rdev->mc.gtt_size = 32 * 1024 * 1024; in rs400_gart_adjust_size()
65 void rs400_gart_tlb_flush(struct radeon_device *rdev) in rs400_gart_tlb_flush() argument
68 unsigned int timeout = rdev->usec_timeout; in rs400_gart_tlb_flush()
81 int rs400_gart_init(struct radeon_device *rdev) in rs400_gart_init() argument
85 if (rdev->gart.ptr) { in rs400_gart_init()
90 switch (rdev->mc.gtt_size / (1024 * 1024)) { in rs400_gart_init()
[all …]
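
rs400_gart_adjust_size() above validates the configured GART aperture by switching on its size in megabytes and, for unsupported values, prints an error and falls back to 32 MB. Below is a small stand-alone sketch of that validate-or-fallback step; the set of accepted sizes is illustrative only, not the driver's actual list.

#include <stdio.h>

/* Clamp a requested GART size (in bytes) to a supported value,
 * falling back to 32 MB when the request is not supported.
 * The accepted sizes below are examples, not the driver's table. */
static unsigned long gart_adjust_size(unsigned long gtt_size)
{
    switch (gtt_size / (1024 * 1024)) {
    case 32:
    case 64:
    case 128:
    case 256:
    case 512:
    case 1024:
    case 2048:
        return gtt_size;            /* supported as-is */
    default:
        fprintf(stderr, "Unsupported gart size %uM, forcing 32M\n",
                (unsigned)(gtt_size >> 20));
        return 32UL * 1024 * 1024;  /* safe default, as in the driver */
    }
}

int main(void)
{
    printf("48M  -> %luM\n", gart_adjust_size(48UL << 20) >> 20);
    printf("256M -> %luM\n", gart_adjust_size(256UL << 20) >> 20);
    return 0;
}
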
radeon_irq_kms.c
56 struct radeon_device *rdev = dev->dev_private; in radeon_driver_irq_handler_kms() local
59 ret = radeon_irq_process(rdev); in radeon_driver_irq_handler_kms()
81 struct radeon_device *rdev = container_of(work, struct radeon_device, in radeon_hotplug_work_func() local
83 struct drm_device *dev = rdev_to_drm(rdev); in radeon_hotplug_work_func()
89 if (!rdev->mode_info.mode_config_initialized) in radeon_hotplug_work_func()
102 struct radeon_device *rdev = container_of(work, struct radeon_device, in radeon_dp_work_func() local
104 struct drm_device *dev = rdev_to_drm(rdev); in radeon_dp_work_func()
124 struct radeon_device *rdev = dev->dev_private; in radeon_driver_irq_preinstall_kms() local
128 spin_lock_irqsave(&rdev->irq.lock, irqflags); in radeon_driver_irq_preinstall_kms()
131 atomic_set(&rdev->irq.ring_int[i], 0); in radeon_driver_irq_preinstall_kms()
[all …]
r600.c
108 static void r600_debugfs_mc_info_init(struct radeon_device *rdev);
111 int r600_mc_wait_for_idle(struct radeon_device *rdev);
112 static void r600_gpu_init(struct radeon_device *rdev);
113 void r600_fini(struct radeon_device *rdev);
114 void r600_irq_disable(struct radeon_device *rdev);
115 static void r600_pcie_gen2_enable(struct radeon_device *rdev);
120 u32 r600_rcu_rreg(struct radeon_device *rdev, u32 reg) in r600_rcu_rreg() argument
125 spin_lock_irqsave(&rdev->rcu_idx_lock, flags); in r600_rcu_rreg()
128 spin_unlock_irqrestore(&rdev->rcu_idx_lock, flags); in r600_rcu_rreg()
132 void r600_rcu_wreg(struct radeon_device *rdev, u32 reg, u32 v) in r600_rcu_wreg() argument
[all …]
rv770.c
49 static void rv770_gpu_init(struct radeon_device *rdev);
50 void rv770_fini(struct radeon_device *rdev);
51 static void rv770_pcie_gen2_enable(struct radeon_device *rdev);
52 int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk);
54 int rv770_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk) in rv770_set_uvd_clocks() argument
60 if (rdev->family == CHIP_RV740) in rv770_set_uvd_clocks()
61 return evergreen_set_uvd_clocks(rdev, vclk, dclk); in rv770_set_uvd_clocks()
74 r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 50000, 160000, in rv770_set_uvd_clocks()
94 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); in rv770_set_uvd_clocks()
125 r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL); in rv770_set_uvd_clocks()
[all …]
rs600.c
54 static void rs600_gpu_init(struct radeon_device *rdev);
55 int rs600_mc_wait_for_idle(struct radeon_device *rdev);
62 static bool avivo_is_in_vblank(struct radeon_device *rdev, int crtc) in avivo_is_in_vblank() argument
70 static bool avivo_is_counter_moving(struct radeon_device *rdev, int crtc) in avivo_is_counter_moving() argument
91 void avivo_wait_for_vblank(struct radeon_device *rdev, int crtc) in avivo_wait_for_vblank() argument
95 if (crtc >= rdev->num_crtc) in avivo_wait_for_vblank()
104 while (avivo_is_in_vblank(rdev, crtc)) { in avivo_wait_for_vblank()
106 if (!avivo_is_counter_moving(rdev, crtc)) in avivo_wait_for_vblank()
111 while (!avivo_is_in_vblank(rdev, crtc)) { in avivo_wait_for_vblank()
113 if (!avivo_is_counter_moving(rdev, crtc)) in avivo_wait_for_vblank()
[all …]
radeon_fence.c
67 static void radeon_fence_write(struct radeon_device *rdev, u32 seq, int ring) in radeon_fence_write() argument
69 struct radeon_fence_driver *drv = &rdev->fence_drv[ring]; in radeon_fence_write()
71 if (likely(rdev->wb.enabled || !drv->scratch_reg)) { in radeon_fence_write()
88 static u32 radeon_fence_read(struct radeon_device *rdev, int ring) in radeon_fence_read() argument
90 struct radeon_fence_driver *drv = &rdev->fence_drv[ring]; in radeon_fence_read()
93 if (likely(rdev->wb.enabled || !drv->scratch_reg)) { in radeon_fence_read()
112 static void radeon_fence_schedule_check(struct radeon_device *rdev, int ring) in radeon_fence_schedule_check() argument
119 &rdev->fence_drv[ring].lockup_work, in radeon_fence_schedule_check()
133 int radeon_fence_emit(struct radeon_device *rdev, in radeon_fence_emit() argument
144 (*fence)->rdev = rdev; in radeon_fence_emit()
[all …]
evergreen.c
57 u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg) in eg_cg_rreg() argument
62 spin_lock_irqsave(&rdev->cg_idx_lock, flags); in eg_cg_rreg()
65 spin_unlock_irqrestore(&rdev->cg_idx_lock, flags); in eg_cg_rreg()
69 void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v) in eg_cg_wreg() argument
73 spin_lock_irqsave(&rdev->cg_idx_lock, flags); in eg_cg_wreg()
76 spin_unlock_irqrestore(&rdev->cg_idx_lock, flags); in eg_cg_wreg()
79 u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg) in eg_pif_phy0_rreg() argument
84 spin_lock_irqsave(&rdev->pif_idx_lock, flags); in eg_pif_phy0_rreg()
87 spin_unlock_irqrestore(&rdev->pif_idx_lock, flags); in eg_pif_phy0_rreg()
91 void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v) in eg_pif_phy0_wreg() argument
[all …]
rv6xx_dpm.c
33 static u32 rv6xx_scale_count_given_unit(struct radeon_device *rdev,
43 static struct rv6xx_power_info *rv6xx_get_pi(struct radeon_device *rdev) in rv6xx_get_pi() argument
45 struct rv6xx_power_info *pi = rdev->pm.dpm.priv; in rv6xx_get_pi()
50 static void rv6xx_force_pcie_gen1(struct radeon_device *rdev) in rv6xx_force_pcie_gen1() argument
63 for (i = 0; i < rdev->usec_timeout; i++) { in rv6xx_force_pcie_gen1()
74 static void rv6xx_enable_pcie_gen2_support(struct radeon_device *rdev) in rv6xx_enable_pcie_gen2_support() argument
87 static void rv6xx_enable_bif_dynamic_pcie_gen2(struct radeon_device *rdev, in rv6xx_enable_bif_dynamic_pcie_gen2() argument
100 static void rv6xx_enable_l0s(struct radeon_device *rdev) in rv6xx_enable_l0s() argument
109 static void rv6xx_enable_l1(struct radeon_device *rdev) in rv6xx_enable_l1() argument
121 static void rv6xx_enable_pll_sleep_in_l1(struct radeon_device *rdev) in rv6xx_enable_pll_sleep_in_l1() argument
[all …]
rs690.c
37 int rs690_mc_wait_for_idle(struct radeon_device *rdev) in rs690_mc_wait_for_idle() argument
42 for (i = 0; i < rdev->usec_timeout; i++) { in rs690_mc_wait_for_idle()
52 static void rs690_gpu_init(struct radeon_device *rdev) in rs690_gpu_init() argument
55 r420_pipes_init(rdev); in rs690_gpu_init()
56 if (rs690_mc_wait_for_idle(rdev)) { in rs690_gpu_init()
66 void rs690_pm_info(struct radeon_device *rdev) in rs690_pm_info() argument
74 if (atom_parse_data_header(rdev->mode_info.atom_context, index, NULL, in rs690_pm_info()
76 info = (union igp_info *)(rdev->mode_info.atom_context->bios + data_offset); in rs690_pm_info()
82 rdev->pm.igp_sideport_mclk.full = dfixed_const(le32_to_cpu(info->info.ulBootUpMemoryClock)); in rs690_pm_info()
83 rdev->pm.igp_sideport_mclk.full = dfixed_div(rdev->pm.igp_sideport_mclk, tmp); in rs690_pm_info()
[all …]
kv_dpm.c
37 static int kv_enable_nb_dpm(struct radeon_device *rdev,
39 static void kv_init_graphics_levels(struct radeon_device *rdev);
40 static int kv_calculate_ds_divider(struct radeon_device *rdev);
41 static int kv_calculate_nbps_level_settings(struct radeon_device *rdev);
42 static int kv_calculate_dpm_settings(struct radeon_device *rdev);
43 static void kv_enable_new_levels(struct radeon_device *rdev);
44 static void kv_program_nbps_index_settings(struct radeon_device *rdev,
46 static int kv_set_enabled_level(struct radeon_device *rdev, u32 level);
47 static int kv_set_enabled_levels(struct radeon_device *rdev);
48 static int kv_force_dpm_highest(struct radeon_device *rdev);
[all …]
rs780_dpm.c
42 static struct igp_power_info *rs780_get_pi(struct radeon_device *rdev) in rs780_get_pi() argument
44 struct igp_power_info *pi = rdev->pm.dpm.priv; in rs780_get_pi()
49 static void rs780_get_pm_mode_parameters(struct radeon_device *rdev) in rs780_get_pm_mode_parameters() argument
51 struct igp_power_info *pi = rs780_get_pi(rdev); in rs780_get_pm_mode_parameters()
52 struct radeon_mode_info *minfo = &rdev->mode_info; in rs780_get_pm_mode_parameters()
61 for (i = 0; i < rdev->num_crtc; i++) { in rs780_get_pm_mode_parameters()
73 static void rs780_voltage_scaling_enable(struct radeon_device *rdev, bool enable);
75 static int rs780_initialize_dpm_power_state(struct radeon_device *rdev, in rs780_initialize_dpm_power_state() argument
82 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, in rs780_initialize_dpm_power_state()
87 r600_engine_clock_entry_set_reference_divider(rdev, 0, dividers.ref_div); in rs780_initialize_dpm_power_state()
[all …]
btc_dpm.c
53 extern int ni_mc_load_microcode(struct radeon_device *rdev);
1198 static u32 btc_get_valid_mclk(struct radeon_device *rdev, in btc_get_valid_mclk() argument
1201 return btc_find_valid_clock(&rdev->pm.dpm.dyn_state.valid_mclk_values, in btc_get_valid_mclk()
1205 static u32 btc_get_valid_sclk(struct radeon_device *rdev, in btc_get_valid_sclk() argument
1208 return btc_find_valid_clock(&rdev->pm.dpm.dyn_state.valid_sclk_values, in btc_get_valid_sclk()
1212 void btc_skip_blacklist_clocks(struct radeon_device *rdev, in btc_skip_blacklist_clocks() argument
1231 *sclk = btc_get_valid_sclk(rdev, max_sclk, *sclk + 1); in btc_skip_blacklist_clocks()
1234 btc_skip_blacklist_clocks(rdev, max_sclk, max_mclk, sclk, mclk); in btc_skip_blacklist_clocks()
1239 void btc_adjust_clock_combinations(struct radeon_device *rdev, in btc_adjust_clock_combinations() argument
1251 if (((pl->mclk + (pl->sclk - 1)) / pl->sclk) > rdev->pm.dpm.dyn_state.mclk_sclk_ratio) in btc_adjust_clock_combinations()
[all …]
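
btc_adjust_clock_combinations() above enforces a maximum memory-to-engine clock ratio using ceiling division: (mclk + sclk - 1) / sclk rounds the ratio up before comparing it against mclk_sclk_ratio. The sketch below shows that check plus one possible fix-up that raises sclk until the ratio is satisfied; the ratio limit of 4 is an assumed example value, and the driver would additionally snap the result to a valid clock (see btc_get_valid_sclk() above).

#include <stdio.h>

/* Ceiling division: how many whole sclk periods cover mclk, rounded up. */
static unsigned int ratio_ceil(unsigned int mclk, unsigned int sclk)
{
    return (mclk + (sclk - 1)) / sclk;
}

/* If mclk/sclk exceeds the allowed ratio, raise sclk just enough
 * to bring the combination back into range. */
static void adjust_clock_combination(unsigned int mclk, unsigned int *sclk,
                                     unsigned int max_ratio)
{
    if (ratio_ceil(mclk, *sclk) > max_ratio)
        *sclk = (mclk + (max_ratio - 1)) / max_ratio;
}

int main(void)
{
    unsigned int sclk = 200, mclk = 1000;

    adjust_clock_combination(mclk, &sclk, 4);   /* assumed ratio limit */
    printf("mclk=%u sclk=%u ratio=%u\n", mclk, sclk, ratio_ceil(mclk, sclk));
    return 0;
}
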
trinity_dpm.c
293 static void trinity_program_clk_gating_hw_sequence(struct radeon_device *rdev,
295 static void trinity_override_dynamic_mg_powergating(struct radeon_device *rdev);
296 static void trinity_apply_state_adjust_rules(struct radeon_device *rdev,
307 static struct trinity_power_info *trinity_get_pi(struct radeon_device *rdev) in trinity_get_pi() argument
309 struct trinity_power_info *pi = rdev->pm.dpm.priv; in trinity_get_pi()
314 static void trinity_gfx_powergating_initialize(struct radeon_device *rdev) in trinity_gfx_powergating_initialize() argument
316 struct trinity_power_info *pi = trinity_get_pi(rdev); in trinity_gfx_powergating_initialize()
320 u32 xclk = radeon_get_xclk(rdev); in trinity_gfx_powergating_initialize()
325 ret = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM, in trinity_gfx_powergating_initialize()
345 trinity_override_dynamic_mg_powergating(rdev); in trinity_gfx_powergating_initialize()
[all …]
radeon_ib.c
45 static void radeon_debugfs_sa_init(struct radeon_device *rdev);
60 int radeon_ib_get(struct radeon_device *rdev, int ring, in radeon_ib_get() argument
66 r = radeon_sa_bo_new(&rdev->ring_tmp_bo, &ib->sa_bo, size, 256); in radeon_ib_get()
68 dev_err(rdev->dev, "failed to get a new IB (%d)\n", r); in radeon_ib_get()
99 void radeon_ib_free(struct radeon_device *rdev, struct radeon_ib *ib) in radeon_ib_free() argument
101 radeon_sync_free(rdev, &ib->sync, ib->fence); in radeon_ib_free()
127 int radeon_ib_schedule(struct radeon_device *rdev, struct radeon_ib *ib, in radeon_ib_schedule() argument
130 struct radeon_ring *ring = &rdev->ring[ib->ring]; in radeon_ib_schedule()
135 dev_err(rdev->dev, "couldn't schedule ib\n"); in radeon_ib_schedule()
140 r = radeon_ring_lock(rdev, ring, 64 + RADEON_NUM_SYNCS * 8); in radeon_ib_schedule()
[all …]
/linux/net/wireless/
rdev-ops.h
15 static inline int rdev_suspend(struct cfg80211_registered_device *rdev, in rdev_suspend() argument
19 trace_rdev_suspend(&rdev->wiphy, wowlan); in rdev_suspend()
20 ret = rdev->ops->suspend(&rdev->wiphy, wowlan); in rdev_suspend()
21 trace_rdev_return_int(&rdev->wiphy, ret); in rdev_suspend()
25 static inline int rdev_resume(struct cfg80211_registered_device *rdev) in rdev_resume() argument
28 trace_rdev_resume(&rdev->wiphy); in rdev_resume()
29 ret = rdev->ops->resume(&rdev->wiphy); in rdev_resume()
30 trace_rdev_return_int(&rdev->wiphy, ret); in rdev_resume()
34 static inline void rdev_set_wakeup(struct cfg80211_registered_device *rdev, in rdev_set_wakeup() argument
37 trace_rdev_set_wakeup(&rdev->wiphy, enabled); in rdev_set_wakeup()
[all …]
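
Both rdev-ops.h headers (cfg80211 here and cfg802154 below) follow one convention: each static inline rdev_*() wrapper emits an entry tracepoint, dispatches through rdev->ops, and traces the return value, so every driver callback is instrumented in the same place. Here is a stand-alone sketch of that wrap-and-trace dispatch pattern, with printf() standing in for the tracepoints and an invented ops structure in place of cfg80211_ops.

#include <stdio.h>

/* Invented ops table standing in for cfg80211_ops. */
struct demo_ops {
    int (*suspend)(void *priv);
    int (*resume)(void *priv);
};

struct demo_dev {
    const struct demo_ops *ops;
    void *priv;
};

/* Wrapper in the style of rdev_suspend(): trace, dispatch, trace the return. */
static inline int demo_suspend(struct demo_dev *rdev)
{
    int ret;

    printf("trace: suspend enter\n");           /* trace_rdev_suspend(...) */
    ret = rdev->ops->suspend(rdev->priv);
    printf("trace: suspend return %d\n", ret);  /* trace_rdev_return_int(...) */
    return ret;
}

static int driver_suspend(void *priv)
{
    (void)priv;
    return 0;   /* pretend the driver suspended cleanly */
}

int main(void)
{
    static const struct demo_ops ops = { .suspend = driver_suspend };
    struct demo_dev dev = { .ops = &ops };

    return demo_suspend(&dev);
}
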
core.c
62 struct cfg80211_registered_device *result = NULL, *rdev; in cfg80211_rdev_by_wiphy_idx() local
66 for_each_rdev(rdev) { in cfg80211_rdev_by_wiphy_idx()
67 if (rdev->wiphy_idx == wiphy_idx) { in cfg80211_rdev_by_wiphy_idx()
68 result = rdev; in cfg80211_rdev_by_wiphy_idx()
78 struct cfg80211_registered_device *rdev = wiphy_to_rdev(wiphy); in get_wiphy_idx() local
80 return rdev->wiphy_idx; in get_wiphy_idx()
85 struct cfg80211_registered_device *rdev; in wiphy_idx_to_wiphy() local
89 rdev = cfg80211_rdev_by_wiphy_idx(wiphy_idx); in wiphy_idx_to_wiphy()
90 if (!rdev) in wiphy_idx_to_wiphy()
92 return &rdev->wiphy; in wiphy_idx_to_wiphy()
[all …]
/linux/net/ieee802154/
rdev-ops.h
11 rdev_add_virtual_intf_deprecated(struct cfg802154_registered_device *rdev, in rdev_add_virtual_intf_deprecated() argument
16 return rdev->ops->add_virtual_intf_deprecated(&rdev->wpan_phy, name, in rdev_add_virtual_intf_deprecated()
21 rdev_del_virtual_intf_deprecated(struct cfg802154_registered_device *rdev, in rdev_del_virtual_intf_deprecated() argument
24 rdev->ops->del_virtual_intf_deprecated(&rdev->wpan_phy, dev); in rdev_del_virtual_intf_deprecated()
28 rdev_suspend(struct cfg802154_registered_device *rdev) in rdev_suspend() argument
31 trace_802154_rdev_suspend(&rdev->wpan_phy); in rdev_suspend()
32 ret = rdev->ops->suspend(&rdev->wpan_phy); in rdev_suspend()
33 trace_802154_rdev_return_int(&rdev->wpan_phy, ret); in rdev_suspend()
38 rdev_resume(struct cfg802154_registered_device *rdev) in rdev_resume() argument
41 trace_802154_rdev_resume(&rdev->wpan_phy); in rdev_resume()
[all …]
/linux/drivers/regulator/
core.c
95 struct regulator_dev *rdev; member
99 static int _regulator_is_enabled(struct regulator_dev *rdev);
101 static int _regulator_get_error_flags(struct regulator_dev *rdev, unsigned int *flags);
102 static int _regulator_get_current_limit(struct regulator_dev *rdev);
103 static unsigned int _regulator_get_mode(struct regulator_dev *rdev);
104 static int _notifier_call_chain(struct regulator_dev *rdev,
106 static int _regulator_do_set_voltage(struct regulator_dev *rdev,
108 static int regulator_balance_voltage(struct regulator_dev *rdev,
110 static struct regulator *create_regulator(struct regulator_dev *rdev,
116 const char *rdev_get_name(struct regulator_dev *rdev) in rdev_get_name() argument
[all …]
helpers.c
28 int regulator_is_enabled_regmap(struct regulator_dev *rdev) in regulator_is_enabled_regmap() argument
33 ret = regmap_read(rdev->regmap, rdev->desc->enable_reg, &val); in regulator_is_enabled_regmap()
37 val &= rdev->desc->enable_mask; in regulator_is_enabled_regmap()
39 if (rdev->desc->enable_is_inverted) { in regulator_is_enabled_regmap()
40 if (rdev->desc->enable_val) in regulator_is_enabled_regmap()
41 return val != rdev->desc->enable_val; in regulator_is_enabled_regmap()
44 if (rdev->desc->enable_val) in regulator_is_enabled_regmap()
45 return val == rdev->desc->enable_val; in regulator_is_enabled_regmap()
60 int regulator_enable_regmap(struct regulator_dev *rdev) in regulator_enable_regmap() argument
64 if (rdev->desc->enable_is_inverted) { in regulator_enable_regmap()
[all …]
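
regulator_is_enabled_regmap() above decodes the enable state of a regmap-backed regulator: read enable_reg, mask with enable_mask, then compare against enable_val, with enable_is_inverted flipping the sense of the test. The sketch below isolates just that decode step with an in-memory register value instead of a regmap_read(); the field names mirror the regulator_desc fields visible in the excerpt, and the non-inverted branch is filled in by symmetry with the inverted one shown.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Subset of the enable-related regulator_desc fields shown in helpers.c. */
struct enable_desc {
    uint32_t enable_mask;
    uint32_t enable_val;      /* 0 means "any bit under the mask counts" */
    bool enable_is_inverted;
};

static bool is_enabled(const struct enable_desc *d, uint32_t reg_val)
{
    uint32_t val = reg_val & d->enable_mask;

    if (d->enable_is_inverted) {
        if (d->enable_val)
            return val != d->enable_val;
        return val == 0;
    }

    if (d->enable_val)
        return val == d->enable_val;
    return val != 0;
}

int main(void)
{
    struct enable_desc d = { .enable_mask = 0x3, .enable_val = 0x1 };

    printf("reg=0x1 -> %d\n", is_enabled(&d, 0x1));  /* enabled */
    printf("reg=0x2 -> %d\n", is_enabled(&d, 0x2));  /* disabled */
    return 0;
}
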
/linux/drivers/infiniband/hw/bnxt_re/
main.c
82 static int bnxt_re_hwrm_qcaps(struct bnxt_re_dev *rdev);
83 static int bnxt_re_query_hwrm_intf_version(struct bnxt_re_dev *rdev);
85 static int bnxt_re_hwrm_qcfg(struct bnxt_re_dev *rdev, u32 *db_len,
89 static void bnxt_re_set_db_offset(struct bnxt_re_dev *rdev) in bnxt_re_set_db_offset() argument
99 res = &rdev->qplib_res; in bnxt_re_set_db_offset()
100 en_dev = rdev->en_dev; in bnxt_re_set_db_offset()
101 cctx = rdev->chip_ctx; in bnxt_re_set_db_offset()
104 rc = bnxt_re_hwrm_qcfg(rdev, &l2db_len, &offset); in bnxt_re_set_db_offset()
106 dev_info(rdev_to_dev(rdev), in bnxt_re_set_db_offset()
126 dev_info(rdev_to_dev(rdev), "Low latency framework is enabled\n"); in bnxt_re_set_db_offset()
[all …]
/linux/drivers/infiniband/hw/bng_re/
bng_dev.c
26 struct bng_re_dev *rdev; in bng_re_dev_add() local
29 rdev = ib_alloc_device(bng_re_dev, ibdev); in bng_re_dev_add()
30 if (!rdev) { in bng_re_dev_add()
36 rdev->netdev = aux_dev->net; in bng_re_dev_add()
37 rdev->aux_dev = aux_dev; in bng_re_dev_add()
38 rdev->adev = adev; in bng_re_dev_add()
39 rdev->fn_id = rdev->aux_dev->pdev->devfn; in bng_re_dev_add()
41 return rdev; in bng_re_dev_add()
45 static int bng_re_register_netdev(struct bng_re_dev *rdev) in bng_re_register_netdev() argument
49 aux_dev = rdev->aux_dev; in bng_re_register_netdev()
[all …]
/linux/drivers/infiniband/hw/cxgb4/
resource.c
38 static int c4iw_init_qid_table(struct c4iw_rdev *rdev) in c4iw_init_qid_table() argument
42 if (c4iw_id_table_alloc(&rdev->resource.qid_table, in c4iw_init_qid_table()
43 rdev->lldi.vr->qp.start, in c4iw_init_qid_table()
44 rdev->lldi.vr->qp.size, in c4iw_init_qid_table()
45 rdev->lldi.vr->qp.size, 0)) in c4iw_init_qid_table()
48 for (i = rdev->lldi.vr->qp.start; in c4iw_init_qid_table()
49 i < rdev->lldi.vr->qp.start + rdev->lldi.vr->qp.size; i++) in c4iw_init_qid_table()
50 if (!(i & rdev->qpmask)) in c4iw_init_qid_table()
51 c4iw_id_free(&rdev->resource.qid_table, i); in c4iw_init_qid_table()
56 int c4iw_init_resource(struct c4iw_rdev *rdev, u32 nr_tpt, in c4iw_init_resource() argument
[all …]
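
c4iw_init_qid_table() above allocates a QID table spanning the adapter's QP range and then frees only those IDs for which (i & rdev->qpmask) is zero, so the allocator only ever hands out mask-aligned queue IDs. Below is a simplified sketch of that seeding step using a plain free-list array; the range and mask values are arbitrary examples, and id_free() is a stand-in for c4iw_id_free().

#include <stdio.h>

#define QP_START 1024u
#define QP_SIZE  32u
#define QP_MASK  0x3u   /* example: hand out every fourth id only */

static unsigned int free_ids[QP_SIZE];
static unsigned int free_count;

/* Stand-in for c4iw_id_free(): push an id onto the free list. */
static void id_free(unsigned int id)
{
    free_ids[free_count++] = id;
}

/* Seed the allocator with only the mask-aligned ids in [start, start + size). */
static void init_qid_table(unsigned int start, unsigned int size,
                           unsigned int qpmask)
{
    unsigned int i;

    for (i = start; i < start + size; i++)
        if (!(i & qpmask))
            id_free(i);
}

int main(void)
{
    unsigned int i;

    init_qid_table(QP_START, QP_SIZE, QP_MASK);
    for (i = 0; i < free_count; i++)
        printf("free qid: %u\n", free_ids[i]);
    return 0;
}
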
