Searched refs:vd (Results 1 – 25 of 129) sorted by relevance


/linux/lib/vdso/
gettimeofday.c
11 # define VDSO_DELTA_MASK(vd) ULLONG_MAX argument
13 # define VDSO_DELTA_MASK(vd) (vd->mask) argument
17 static __always_inline bool vdso_delta_ok(const struct vdso_data *vd, u64 delta) in vdso_delta_ok() argument
19 return delta < vd->max_cycles; in vdso_delta_ok()
22 static __always_inline bool vdso_delta_ok(const struct vdso_data *vd, u64 delta) in vdso_delta_ok() argument
39 static __always_inline u64 vdso_calc_ns(const struct vdso_data *vd, u64 cycles, u64 base) in vdso_calc_ns() argument
41 u64 delta = (cycles - vd->cycle_last) & VDSO_DELTA_MASK(vd); in vdso_calc_ns()
43 if (likely(vdso_delta_ok(vd, delta))) in vdso_calc_ns()
44 return vdso_shift_ns((delta * vd->mult) + base, vd->shift); in vdso_calc_ns()
46 return mul_u64_u32_add_u64_shr(delta, vd->mult, base, vd->shift); in vdso_calc_ns()
[all …]
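
For reference, a minimal userspace sketch of the cycles-to-nanoseconds conversion the vdso_calc_ns() hits above implement, assuming a simplified stand-in for vdso_data (the struct below is hypothetical; field names mirror the snippet):

#include <stdint.h>

/* Simplified stand-in for the real vdso_data; only the fields used here. */
struct sketch_vdso_data {
	uint64_t cycle_last;   /* counter value at the last timekeeping update */
	uint64_t mask;         /* width mask of the hardware counter */
	uint32_t mult;         /* fixed-point multiplier */
	uint32_t shift;        /* fixed-point shift */
};

static uint64_t sketch_calc_ns(const struct sketch_vdso_data *vd,
			       uint64_t cycles, uint64_t base)
{
	/* Cycles elapsed since the last update, masked to the counter width. */
	uint64_t delta = (cycles - vd->cycle_last) & vd->mask;

	/* Scale to nanoseconds: (delta * mult + base) >> shift, as above. */
	return ((delta * vd->mult) + base) >> vd->shift;
}

The mul_u64_u32_add_u64_shr() branch in the hits handles deltas too large for the plain 64-bit multiply; the sketch keeps only the common fast path.
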
/linux/drivers/dma/
virt-dma.c
22 struct virt_dma_desc *vd = to_virt_desc(tx); in vchan_tx_submit() local
29 list_move_tail(&vd->node, &vc->desc_submitted); in vchan_tx_submit()
33 vc, vd, cookie); in vchan_tx_submit()
52 struct virt_dma_desc *vd = to_virt_desc(tx); in vchan_tx_desc_free() local
56 list_del(&vd->node); in vchan_tx_desc_free()
60 vc, vd, vd->tx.cookie); in vchan_tx_desc_free()
61 vc->desc_free(vd); in vchan_tx_desc_free()
69 struct virt_dma_desc *vd; in vchan_find_desc() local
71 list_for_each_entry(vd, &vc->desc_issued, node) in vchan_find_desc()
72 if (vd->tx.cookie == cookie) in vchan_find_desc()
[all …]
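
A hedged sketch of the cookie lookup vchan_find_desc() performs above, using a plain singly linked list in place of the kernel's list_head machinery (names below are hypothetical):

#include <stddef.h>
#include <stdint.h>

struct sketch_desc {
	uint32_t cookie;          /* transaction cookie assigned at submit time */
	struct sketch_desc *next; /* next descriptor on the issued list */
};

static struct sketch_desc *sketch_find_desc(struct sketch_desc *issued,
					    uint32_t cookie)
{
	struct sketch_desc *d;

	for (d = issued; d; d = d->next)
		if (d->cookie == cookie)
			return d;
	return NULL;  /* not on the issued list (never issued, or already completed) */
}

In the driver the same search walks vc->desc_issued under the channel lock.
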
loongson1-apb-dma.c
60 struct virt_dma_desc vd; member
90 container_of(d, struct ls1x_dma_desc, vd)
189 static void ls1x_dma_free_desc(struct virt_dma_desc *vd) in ls1x_dma_free_desc() argument
191 struct ls1x_dma_desc *desc = to_ls1x_dma_desc(vd); in ls1x_dma_free_desc()
192 struct ls1x_dma_chan *chan = to_ls1x_dma_chan(vd->tx.chan); in ls1x_dma_free_desc()
310 ls1x_dma_free_desc(&desc->vd); in ls1x_dma_prep_slave_sg()
314 return vchan_tx_prep(to_virt_chan(dchan), &desc->vd, flags); in ls1x_dma_prep_slave_sg()
354 ls1x_dma_free_desc(&desc->vd); in ls1x_dma_prep_dma_cyclic()
358 return vchan_tx_prep(to_virt_chan(dchan), &desc->vd, flags); in ls1x_dma_prep_dma_cyclic()
400 struct virt_dma_desc *vd; in ls1x_dma_terminate_all() local
[all …]
uniphier-mdmac.c
52 struct virt_dma_desc vd; member
81 to_uniphier_mdmac_desc(struct virt_dma_desc *vd) in to_uniphier_mdmac_desc() argument
83 return container_of(vd, struct uniphier_mdmac_desc, vd); in to_uniphier_mdmac_desc()
90 struct virt_dma_desc *vd; in uniphier_mdmac_next_desc() local
92 vd = vchan_next_desc(&mc->vc); in uniphier_mdmac_next_desc()
93 if (!vd) { in uniphier_mdmac_next_desc()
98 list_del(&vd->node); in uniphier_mdmac_next_desc()
100 mc->md = to_uniphier_mdmac_desc(vd); in uniphier_mdmac_next_desc()
210 vchan_cookie_complete(&md->vd); in uniphier_mdmac_interrupt()
249 return vchan_tx_prep(vc, &md->vd, flags); in uniphier_mdmac_prep_slave_sg()
[all …]
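
A sketch of the container_of() pattern that to_uniphier_mdmac_desc() uses above: recover the driver-private descriptor from the embedded virt_dma_desc by stepping back over the member offset. The structures below are hypothetical stand-ins:

#include <stddef.h>

struct sketch_vdesc {
	int node;
};

struct sketch_drv_desc {
	int nr_sg;               /* driver-private state */
	struct sketch_vdesc vd;  /* embedded generic descriptor, as in the driver */
};

/* Same idea as the kernel's container_of(): subtract the member offset
 * from the member address to reach the start of the enclosing structure. */
#define sketch_container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

static struct sketch_drv_desc *to_sketch_drv_desc(struct sketch_vdesc *vd)
{
	return sketch_container_of(vd, struct sketch_drv_desc, vd);
}
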
pxa_dma.c
83 struct virt_dma_desc vd; /* Virtual descriptor */ member
143 container_of((_vd), struct pxad_desc_sw, vd)
534 static bool is_desc_completed(struct virt_dma_desc *vd) in is_desc_completed() argument
536 struct pxad_desc_sw *sw_desc = to_pxad_sw_desc(vd); in is_desc_completed()
555 struct virt_dma_desc *vd) in pxad_try_hotchain() argument
571 to_pxad_sw_desc(vd)->misaligned) in pxad_try_hotchain()
576 pxad_desc_chain(vd_last_issued, vd); in pxad_try_hotchain()
577 if (is_chan_running(chan) || is_desc_completed(vd)) in pxad_try_hotchain()
607 struct virt_dma_desc *vd, *tmp; in pxad_chan_handler() local
619 list_for_each_entry_safe(vd, tmp, &chan->vc.desc_issued, node) { in pxad_chan_handler()
[all …]
milbeaut-xdmac.c
62 struct virt_dma_desc vd; member
87 to_milbeaut_xdmac_desc(struct virt_dma_desc *vd) in to_milbeaut_xdmac_desc() argument
89 return container_of(vd, struct milbeaut_xdmac_desc, vd); in to_milbeaut_xdmac_desc()
96 struct virt_dma_desc *vd; in milbeaut_xdmac_next_desc() local
98 vd = vchan_next_desc(&mc->vc); in milbeaut_xdmac_next_desc()
99 if (!vd) { in milbeaut_xdmac_next_desc()
104 list_del(&vd->node); in milbeaut_xdmac_next_desc()
106 mc->md = to_milbeaut_xdmac_desc(vd); in milbeaut_xdmac_next_desc()
175 vchan_cookie_complete(&md->vd); in milbeaut_xdmac_interrupt()
203 return vchan_tx_prep(vc, &md->vd, flags); in milbeaut_xdmac_prep_memcpy()
[all …]
milbeaut-hdmac.c
60 struct virt_dma_desc vd; member
90 to_milbeaut_hdmac_desc(struct virt_dma_desc *vd) in to_milbeaut_hdmac_desc() argument
92 return container_of(vd, struct milbeaut_hdmac_desc, vd); in to_milbeaut_hdmac_desc()
99 struct virt_dma_desc *vd; in milbeaut_hdmac_next_desc() local
101 vd = vchan_next_desc(&mc->vc); in milbeaut_hdmac_next_desc()
102 if (!vd) { in milbeaut_hdmac_next_desc()
107 list_del(&vd->node); in milbeaut_hdmac_next_desc()
109 mc->md = to_milbeaut_hdmac_desc(vd); in milbeaut_hdmac_next_desc()
194 vchan_cookie_complete(&md->vd); in milbeaut_hdmac_interrupt()
284 return vchan_tx_prep(vc, &md->vd, flags); in milbeaut_hdmac_prep_slave_sg()
[all …]
moxart-dma.c
125 struct virt_dma_desc vd; member
170 return container_of(t, struct moxart_desc, vd.tx); in to_moxart_dma_desc()
173 static void moxart_dma_desc_free(struct virt_dma_desc *vd) in moxart_dma_desc_free() argument
175 kfree(container_of(vd, struct moxart_desc, vd)); in moxart_dma_desc_free()
190 moxart_dma_desc_free(&ch->desc->vd); in moxart_terminate_all()
320 return vchan_tx_prep(&ch->vc, &d->vd, tx_flags); in moxart_prep_slave_sg()
413 struct virt_dma_desc *vd; in moxart_dma_start_desc() local
415 vd = vchan_next_desc(&ch->vc); in moxart_dma_start_desc()
417 if (!vd) { in moxart_dma_start_desc()
422 list_del(&vd->node); in moxart_dma_start_desc()
[all …]
amba-pl08x.c
199 struct virt_dma_desc vd; member
284 const struct vendor_data *vd; member
322 return container_of(tx, struct pl08x_txd, vd.tx); in to_pl08x_txd()
394 if (pl08x->vd->pl080s) in pl08x_write_lli()
514 if (pl08x->vd->pl080s) in pl08x_write_lli()
531 struct virt_dma_desc *vd = vchan_next_desc(&plchan->vc); in pl08x_start_next_txd() local
532 struct pl08x_txd *txd = to_pl08x_txd(&vd->tx); in pl08x_start_next_txd()
535 list_del(&txd->vd.node); in pl08x_start_next_txd()
825 for (i = 0; i < pl08x->vd->channels; i++) { in pl08x_get_phy_channel()
839 if (i == pl08x->vd->channels) { in pl08x_get_phy_channel()
[all …]
/linux/include/vdso/
helpers.h
10 static __always_inline u32 vdso_read_begin(const struct vdso_data *vd) in vdso_read_begin() argument
14 while (unlikely((seq = READ_ONCE(vd->seq)) & 1)) in vdso_read_begin()
21 static __always_inline u32 vdso_read_retry(const struct vdso_data *vd, in vdso_read_retry() argument
27 seq = READ_ONCE(vd->seq); in vdso_read_retry()
31 static __always_inline void vdso_write_begin(struct vdso_data *vd) in vdso_write_begin() argument
38 WRITE_ONCE(vd[CS_HRES_COARSE].seq, vd[CS_HRES_COARSE].seq + 1); in vdso_write_begin()
39 WRITE_ONCE(vd[CS_RAW].seq, vd[CS_RAW].seq + 1); in vdso_write_begin()
43 static __always_inline void vdso_write_end(struct vdso_data *vd) in vdso_write_end() argument
51 WRITE_ONCE(vd[CS_HRES_COARSE].seq, vd[CS_HRES_COARSE].seq + 1); in vdso_write_end()
52 WRITE_ONCE(vd[CS_RAW].seq, vd[CS_RAW].seq + 1); in vdso_write_end()
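
A self-contained sketch of the sequence-counter protocol that vdso_read_begin()/vdso_read_retry() implement above: the writer bumps seq to an odd value before updating and back to even afterwards; readers spin while seq is odd and retry if it changed across the read. The struct and field names below are illustrative only:

#include <stdint.h>
#include <stdatomic.h>

struct sketch_timedata {
	_Atomic uint32_t seq;  /* odd while an update is in progress */
	uint64_t ns;           /* data protected by the sequence counter */
};

static uint64_t sketch_read_time(struct sketch_timedata *td)
{
	uint32_t seq;
	uint64_t ns;

	do {
		/* vdso_read_begin(): wait out any in-progress update. */
		while ((seq = atomic_load(&td->seq)) & 1)
			;
		ns = td->ns;
		/* vdso_read_retry(): redo the read if a writer ran meanwhile. */
	} while (atomic_load(&td->seq) != seq);

	return ns;
}
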
/linux/arch/powerpc/kernel/vdso/
vgettimeofday.c
10 const struct vdso_data *vd) in __c_kernel_clock_gettime() argument
12 return __cvdso_clock_gettime_data(vd, clock, ts); in __c_kernel_clock_gettime()
16 const struct vdso_data *vd) in __c_kernel_clock_getres() argument
18 return __cvdso_clock_getres_data(vd, clock_id, res); in __c_kernel_clock_getres()
22 const struct vdso_data *vd) in __c_kernel_clock_gettime() argument
24 return __cvdso_clock_gettime32_data(vd, clock, ts); in __c_kernel_clock_gettime()
28 const struct vdso_data *vd) in __c_kernel_clock_gettime64() argument
30 return __cvdso_clock_gettime_data(vd, clock, ts); in __c_kernel_clock_gettime64()
34 const struct vdso_data *vd) in __c_kernel_clock_getres() argument
36 return __cvdso_clock_getres_time32_data(vd, clock_id, res); in __c_kernel_clock_getres()
[all …]
/linux/arch/powerpc/include/asm/vdso/
gettimeofday.h
97 const struct vdso_data *vd) in __arch_get_hw_counter() argument
106 const struct vdso_data *__arch_get_timens_vdso_data(const struct vdso_data *vd) in __arch_get_timens_vdso_data() argument
108 return (void *)vd + (1U << CONFIG_PAGE_SHIFT); in __arch_get_timens_vdso_data()
112 static inline bool vdso_clocksource_ok(const struct vdso_data *vd) in vdso_clocksource_ok() argument
138 const struct vdso_data *vd);
140 const struct vdso_data *vd);
143 const struct vdso_data *vd);
145 const struct vdso_data *vd);
147 const struct vdso_data *vd);
150 const struct vdso_data *vd);
[all …]
/linux/arch/x86/include/asm/vdso/
gettimeofday.h
62 const struct vdso_data *__arch_get_timens_vdso_data(const struct vdso_data *vd) in __arch_get_timens_vdso_data() argument
251 const struct vdso_data *vd) in __arch_get_hw_counter() argument
281 static inline bool arch_vdso_clocksource_ok(const struct vdso_data *vd) in arch_vdso_clocksource_ok() argument
320 static __always_inline u64 vdso_calc_ns(const struct vdso_data *vd, u64 cycles, u64 base) in vdso_calc_ns() argument
322 u64 delta = cycles - vd->cycle_last; in vdso_calc_ns()
334 if (unlikely(delta > vd->max_cycles)) { in vdso_calc_ns()
341 return base >> vd->shift; in vdso_calc_ns()
344 return mul_u64_u32_add_u64_shr(delta & S64_MAX, vd->mult, base, vd->shift); in vdso_calc_ns()
347 return ((delta * vd->mult) + base) >> vd->shift; in vdso_calc_ns()
/linux/arch/powerpc/kernel/
vecemu.c
266 unsigned int va, vb, vc, vd; in emulate_altivec() local
275 vd = (word >> 21) & 0x1f; in emulate_altivec()
285 vaddfp(&vrs[vd], &vrs[va], &vrs[vb]); in emulate_altivec()
288 vsubfp(&vrs[vd], &vrs[va], &vrs[vb]); in emulate_altivec()
291 vrefp(&vrs[vd], &vrs[vb]); in emulate_altivec()
294 vrsqrtefp(&vrs[vd], &vrs[vb]); in emulate_altivec()
298 vrs[vd].u[i] = eexp2(vrs[vb].u[i]); in emulate_altivec()
302 vrs[vd].u[i] = elog2(vrs[vb].u[i]); in emulate_altivec()
306 vrs[vd].u[i] = rfin(vrs[vb].u[i]); in emulate_altivec()
310 vrs[vd].u[i] = rfiz(vrs[vb].u[i]); in emulate_altivec()
[all …]
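
A sketch of the field decode emulate_altivec() relies on above: vd, and by the same pattern va, vb and vc, are 5-bit vector register numbers pulled from fixed positions of the 32-bit instruction word. Only the vd shift appears in the hits; the other offsets below follow the standard PowerPC VA-form layout and should be treated as an assumption here:

#include <stdint.h>

struct sketch_vaform {
	unsigned int vd, va, vb, vc;
};

static struct sketch_vaform sketch_decode_vaform(uint32_t word)
{
	struct sketch_vaform f;

	f.vd = (word >> 21) & 0x1f;  /* destination vector register (shown above) */
	f.va = (word >> 16) & 0x1f;  /* source A (assumed standard VA-form) */
	f.vb = (word >> 11) & 0x1f;  /* source B */
	f.vc = (word >>  6) & 0x1f;  /* source C */
	return f;
}
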
/linux/drivers/dma/ptdma/
ptdma-dmaengine.c
21 static inline struct pt_dma_desc *to_pt_desc(struct virt_dma_desc *vd) in to_pt_desc() argument
23 return container_of(vd, struct pt_dma_desc, vd); in to_pt_desc()
40 static void pt_do_cleanup(struct virt_dma_desc *vd) in pt_do_cleanup() argument
42 struct pt_dma_desc *desc = to_pt_desc(vd); in pt_do_cleanup()
73 struct virt_dma_desc *vd = vchan_next_desc(&chan->vc); in pt_next_dma_desc() local
75 return vd ? to_pt_desc(vd) : NULL; in pt_next_dma_desc()
82 struct virt_dma_desc *vd; in pt_handle_active_desc() local
94 tx_desc = &desc->vd.tx; in pt_handle_active_desc()
95 vd = &desc->vd; in pt_handle_active_desc()
109 list_del(&desc->vd.node); in pt_handle_active_desc()
[all …]
/linux/drivers/dma/mediatek/
mtk-hsdma.c
131 struct virt_dma_desc vd; member
147 struct virt_dma_desc *vd; member
258 static struct mtk_hsdma_vdesc *to_hsdma_vdesc(struct virt_dma_desc *vd) in to_hsdma_vdesc() argument
260 return container_of(vd, struct mtk_hsdma_vdesc, vd); in to_hsdma_vdesc()
299 static void mtk_hsdma_vdesc_free(struct virt_dma_desc *vd) in mtk_hsdma_vdesc_free() argument
301 kfree(container_of(vd, struct mtk_hsdma_vdesc, vd)); in mtk_hsdma_vdesc_free()
459 ring->cb[ring->cur_tptr].vd = &hvd->vd; in mtk_hsdma_issue_pending_vdesc()
497 struct virt_dma_desc *vd, *vd2; in mtk_hsdma_issue_vchan_pending() local
502 list_for_each_entry_safe(vd, vd2, &hvc->vc.desc_issued, node) { in mtk_hsdma_issue_vchan_pending()
505 hvd = to_hsdma_vdesc(vd); in mtk_hsdma_issue_vchan_pending()
[all …]
mtk-cqdma.c
79 struct virt_dma_desc vd; member
158 static struct mtk_cqdma_vdesc *to_cqdma_vdesc(struct virt_dma_desc *vd) in to_cqdma_vdesc() argument
160 return container_of(vd, struct mtk_cqdma_vdesc, vd); in to_cqdma_vdesc()
199 static void mtk_cqdma_vdesc_free(struct virt_dma_desc *vd) in mtk_cqdma_vdesc_free() argument
201 kfree(to_cqdma_vdesc(vd)); in mtk_cqdma_vdesc_free()
267 struct virt_dma_desc *vd, *vd2; in mtk_cqdma_issue_vchan_pending() local
275 list_for_each_entry_safe(vd, vd2, &cvc->vc.desc_issued, node) { in mtk_cqdma_issue_vchan_pending()
280 cvd = to_cqdma_vdesc(vd); in mtk_cqdma_issue_vchan_pending()
290 list_del(&vd->node); in mtk_cqdma_issue_vchan_pending()
338 vchan_cookie_complete(&cvd->parent->vd); in mtk_cqdma_consume_work_queue()
[all …]
/linux/arch/powerpc/platforms/pseries/
mobility.c
119 const char *name, u32 vd, char *value) in update_dt_property() argument
130 if (vd & 0x80000000) { in update_dt_property()
131 vd = ~vd + 1; in update_dt_property()
137 char *new_data = kzalloc(new_prop->length + vd, GFP_KERNEL); in update_dt_property()
142 memcpy(new_data + new_prop->length, value, vd); in update_dt_property()
146 new_prop->length += vd; in update_dt_property()
158 new_prop->length = vd; in update_dt_property()
166 memcpy(new_prop->value, value, vd); in update_dt_property()
188 u32 vd; in update_dt_node() local
217 vd = be32_to_cpu(*(__be32 *)prop_data); in update_dt_node()
[all …]
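
A sketch of the length handling in update_dt_property() above: firmware reports the value length in vd, a set top bit marks a negative length (read here as "value continues in a later chunk"), and 'vd = ~vd + 1' is ordinary two's-complement negation to recover the magnitude. The helper name is hypothetical:

#include <stdint.h>
#include <stdbool.h>

static uint32_t sketch_decode_value_len(uint32_t vd, bool *partial)
{
	*partial = (vd & 0x80000000u) != 0;  /* sign bit: more data follows */
	if (*partial)
		vd = ~vd + 1;                /* two's complement: vd = -vd */
	return vd;
}
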
/linux/drivers/clk/versatile/
icst.c
76 unsigned int vd; in icst_hz_to_vco() local
81 vd = (f + fref_div / 2) / fref_div; in icst_hz_to_vco()
82 if (vd < p->vd_min || vd > p->vd_max) in icst_hz_to_vco()
85 f_pll = fref_div * vd; in icst_hz_to_vco()
91 vco.v = vd - 8; in icst_hz_to_vco()
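
A sketch of the divider selection icst_hz_to_vco() performs above: round the requested frequency to the nearest multiple of the reference, reject out-of-range dividers, and encode the register field as vd - 8. The vd_min/vd_max parameters stand in for the p->vd_min/p->vd_max bounds in the hit:

static int sketch_pick_vd(unsigned long f, unsigned long fref_div,
			  unsigned int vd_min, unsigned int vd_max,
			  unsigned int *vdw, unsigned long *f_pll)
{
	/* (f + fref_div / 2) / fref_div rounds to the nearest integer. */
	unsigned int vd = (f + fref_div / 2) / fref_div;

	if (vd < vd_min || vd > vd_max)
		return -1;            /* requested rate not reachable */

	*f_pll = fref_div * vd;       /* actual, rounded PLL output frequency */
	*vdw = vd - 8;                /* register encoding used above */
	return 0;
}
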
/linux/include/drm/
drm_modes.h
134 #define DRM_MODE(nm, t, c, hd, hss, hse, ht, hsk, vd, vss, vse, vt, vs, f) \ argument
137 .htotal = (ht), .hskew = (hsk), .vdisplay = (vd), \
149 #define __DRM_MODE_INIT(pix, hd, vd, hd_mm, vd_mm) \ argument
152 .htotal = (hd), .vdisplay = (vd), .vsync_start = (vd), \
153 .vsync_end = (vd), .vtotal = (vd), .width_mm = (hd_mm), \
167 #define DRM_MODE_INIT(hz, hd, vd, hd_mm, vd_mm) \ argument
168 __DRM_MODE_INIT((hd) * (vd) * (hz) / 1000 /* kHz */, hd, vd, hd_mm, vd_mm)
180 #define DRM_SIMPLE_MODE(hd, vd, hd_mm, vd_mm) \ argument
181 __DRM_MODE_INIT(1 /* pass validation */, hd, vd, hd_mm, vd_mm)
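
For illustration, what the vertical timing fields of __DRM_MODE_INIT() above reduce to for a hypothetical 800x600 simple panel: with no vertical blanking, vsync_start, vsync_end and vtotal all collapse to vdisplay. The structure below is a trimmed stand-in for drm_display_mode:

struct sketch_display_mode {
	int vdisplay, vsync_start, vsync_end, vtotal;
};

/* Hypothetical 800x600 panel: every vertical field equals vd (= 600). */
static const struct sketch_display_mode sketch_800x600_v = {
	.vdisplay = 600, .vsync_start = 600, .vsync_end = 600, .vtotal = 600,
};
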
/linux/arch/arm/vfp/
vfpdouble.c
54 static void vfp_double_normalise_denormal(struct vfp_double *vd) in vfp_double_normalise_denormal() argument
56 int bits = 31 - fls(vd->significand >> 32); in vfp_double_normalise_denormal()
58 bits = 63 - fls(vd->significand); in vfp_double_normalise_denormal()
60 vfp_double_dump("normalise_denormal: in", vd); in vfp_double_normalise_denormal()
63 vd->exponent -= bits - 1; in vfp_double_normalise_denormal()
64 vd->significand <<= bits; in vfp_double_normalise_denormal()
67 vfp_double_dump("normalise_denormal: out", vd); in vfp_double_normalise_denormal()
70 u32 vfp_double_normaliseround(int dd, struct vfp_double *vd, u32 fpscr, u32 exceptions, const char … in vfp_double_normaliseround() argument
76 vfp_double_dump("pack: in", vd); in vfp_double_normaliseround()
81 if (vd->exponent == 2047 && (vd->significand == 0 || exceptions)) in vfp_double_normaliseround()
[all …]
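
A generic sketch of normalising a denormal as vfp_double_normalise_denormal() does above: shift the significand left until its leading bit reaches the unit position, decrementing the exponent once per shift. The 'bits - 1' adjustment in the hit reflects VFP's internal significand layout and is deliberately not reproduced here:

#include <stdint.h>

static void sketch_normalise(int *exponent, uint64_t *significand)
{
	/* Shift until bit 63 is set; a true zero is left untouched. */
	while (*significand && !(*significand >> 63)) {
		*significand <<= 1;
		(*exponent)--;
	}
}
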
/linux/sound/core/
control.c
126 if (control->vd[idx].owner == ctl) in snd_ctl_release()
127 control->vd[idx].owner = NULL; in snd_ctl_release()
236 *kctl = kzalloc(struct_size(*kctl, vd, count), GFP_KERNEL); in snd_ctl_new()
242 (*kctl)->vd[idx].access = access; in snd_ctl_new()
243 (*kctl)->vd[idx].owner = file; in snd_ctl_new()
670 if (!(kctl->vd[0].access & SNDRV_CTL_ELEM_ACCESS_USER)) in snd_ctl_remove_user_ctl()
673 if (kctl->vd[idx].owner != NULL && kctl->vd[idx].owner != file) in snd_ctl_remove_user_ctl()
694 struct snd_kcontrol_volatile *vd; in snd_ctl_activate_id() local
705 vd in snd_ctl_activate_id()
1130 struct snd_kcontrol_volatile *vd; __snd_ctl_elem_info() local
1200 struct snd_kcontrol_volatile *vd; snd_ctl_elem_read() local
1271 struct snd_kcontrol_volatile *vd; snd_ctl_elem_write() local
1351 struct snd_kcontrol_volatile *vd; snd_ctl_elem_lock() local
1372 struct snd_kcontrol_volatile *vd; snd_ctl_elem_unlock() local
1804 struct snd_kcontrol_volatile *vd = &kctl->vd[snd_ctl_get_ioff(kctl, id)]; call_tlv_handler() local
1829 struct snd_kcontrol_volatile *vd = &kctl->vd[snd_ctl_get_ioff(kctl, id)]; read_tlv_buf() local
1857 struct snd_kcontrol_volatile *vd; snd_ctl_tlv_ioctl() local
[all …]
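
A userspace sketch of the allocation pattern visible in snd_ctl_new() above: one control header followed by 'count' snd_kcontrol_volatile entries ('vd'), sized as a single allocation (struct_size() in the kernel; plain arithmetic here). Names are illustrative:

#include <stdlib.h>

struct sketch_vd {
	unsigned int access;
	void *owner;
};

struct sketch_kctl {
	unsigned int count;
	struct sketch_vd vd[];   /* flexible array: one entry per control element */
};

static struct sketch_kctl *sketch_ctl_new(unsigned int count)
{
	struct sketch_kctl *kctl;

	kctl = calloc(1, sizeof(*kctl) + count * sizeof(struct sketch_vd));
	if (kctl)
		kctl->count = count;
	return kctl;
}
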
/linux/drivers/dma/sh/
usb-dmac.c
52 struct virt_dma_desc vd; member
63 #define to_usb_dmac_desc(vd) container_of(vd, struct usb_dmac_desc, vd) argument
224 struct virt_dma_desc *vd; in usb_dmac_chan_start_desc() local
226 vd = vchan_next_desc(&chan->vc); in usb_dmac_chan_start_desc()
227 if (!vd) { in usb_dmac_chan_start_desc()
237 list_del(&vd->node); in usb_dmac_chan_start_desc()
239 chan->desc = to_usb_dmac_desc(vd); in usb_dmac_chan_start_desc()
442 return vchan_tx_prep(&uchan->vc, &desc->vd, dma_flags); in usb_dmac_prep_slave_sg()
507 struct virt_dma_desc *vd; in usb_dmac_chan_get_residue() local
512 vd = vchan_find_desc(&chan->vc, cookie); in usb_dmac_chan_get_residue()
[all …]
/linux/drivers/misc/cxl/
flash.c
69 u32 vd, char *value) in update_property() argument
85 new_prop->length = vd; in update_property()
92 memcpy(new_prop->value, value, vd); in update_property()
97 dn, name, vd, be32_to_cpu(*val)); in update_property()
116 u32 vd; in update_node() local
144 vd = be32_to_cpu(*(__be32 *)prop_data); in update_node()
145 prop_data += vd + sizeof(vd); in update_node()
154 vd = be32_to_cpu(*(__be32 *)prop_data); in update_node()
155 prop_data += sizeof(vd); in update_node()
157 if ((vd != 0x00000000) && (vd != 0x80000000)) { in update_node()
[all …]
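
A sketch of the property-stream walk update_node() does above: each record carries a 32-bit big-endian length vd followed by vd bytes of value, so the cursor advances by sizeof(vd) + vd per record (the special 0x00000000/0x80000000 length values are handled separately in the hit). The helper below is hypothetical:

#include <stdint.h>
#include <string.h>
#include <arpa/inet.h>  /* ntohl() */

static const uint8_t *sketch_next_record(const uint8_t *prop_data,
					 uint32_t *len)
{
	uint32_t vd;

	memcpy(&vd, prop_data, sizeof(vd));
	*len = ntohl(vd);                      /* length is stored big-endian */
	return prop_data + sizeof(vd) + *len;  /* skip length word and value */
}
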
/linux/arch/s390/include/asm/vdso/
gettimeofday.h
22 static inline u64 __arch_get_hw_counter(s32 clock_mode, const struct vdso_data *vd) in __arch_get_hw_counter() argument
27 adj = vd->arch_data.tod_steering_end - now; in __arch_get_hw_counter()
29 now += (vd->arch_data.tod_steering_delta < 0) ? (adj >> 15) : -(adj >> 15); in __arch_get_hw_counter()
54 const struct vdso_data *__arch_get_timens_vdso_data(const struct vdso_data *vd) in __arch_get_timens_vdso_data() argument
