Searched refs:gctx (results 1 – 7 of 7), sorted by relevance

/linux/drivers/gpu/drm/radeon/
atom.c 184 struct atom_context *gctx = ctx->ctx; in atom_get_src_int() local
193 idx += gctx->reg_block; in atom_get_src_int()
194 switch (gctx->io_mode) { in atom_get_src_int()
196 val = gctx->card->reg_read(gctx->card, idx); in atom_get_src_int()
205 if (!(gctx->io_mode & 0x80)) { in atom_get_src_int()
209 if (!gctx->iio[gctx->io_mode & 0x7F]) { in atom_get_src_int()
211 gctx->io_mode & 0x7F); in atom_get_src_int()
215 atom_iio_execute(gctx, in atom_get_src_int()
216 gctx->iio[gctx->io_mode & 0x7F], in atom_get_src_int()
239 val = gctx->divmul[0]; in atom_get_src_int()
[all …]
/linux/drivers/gpu/drm/amd/amdgpu/
atom.c 186 struct atom_context *gctx = ctx->ctx; in atom_get_src_int() local
195 idx += gctx->reg_block; in atom_get_src_int()
196 switch (gctx->io_mode) { in atom_get_src_int()
198 val = gctx->card->reg_read(gctx->card, idx); in atom_get_src_int()
207 if (!(gctx->io_mode & 0x80)) { in atom_get_src_int()
211 if (!gctx->iio[gctx->io_mode & 0x7F]) { in atom_get_src_int()
213 gctx->io_mode & 0x7F); in atom_get_src_int()
217 atom_iio_execute(gctx, in atom_get_src_int()
218 gctx->iio[gctx->io_mode & 0x7F], in atom_get_src_int()
241 val = gctx->divmul[0]; in atom_get_src_int()
[all …]
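
Both the radeon and amdgpu copies of atom.c hit gctx in the same place: atom_get_src_int() resolves a register read through the shared AtomBIOS parser context, which carries the card callbacks, the current I/O mode and the indirect-I/O (IIO) programs. The following is a standalone sketch of that dispatch, using trimmed-down hypothetical stand-ins for struct atom_context and struct card_info rather than the kernel headers; only the fields visible in the hits above are modelled.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical, trimmed-down stand-ins for the kernel structures. */
struct card_info_sketch {
    uint32_t (*reg_read)(struct card_info_sketch *card, uint32_t reg);
};

struct atom_context_sketch {
    struct card_info_sketch *card;
    uint16_t reg_block;     /* offset added to register indices */
    int io_mode;            /* 0 = MMIO, or 0x80 | IIO program index */
    uint16_t *iio[128];     /* indirect-I/O programs, indexed by io_mode & 0x7F */
};

/* Sketch of the register-read dispatch visible in atom_get_src_int(). */
uint32_t atom_read_reg_sketch(struct atom_context_sketch *gctx, uint32_t idx)
{
    idx += gctx->reg_block;

    if (gctx->io_mode == 0)             /* plain memory-mapped access */
        return gctx->card->reg_read(gctx->card, idx);

    if (!(gctx->io_mode & 0x80)) {
        printf("Unsupported io mode %d\n", gctx->io_mode);
        return 0;
    }
    if (!gctx->iio[gctx->io_mode & 0x7F]) {
        printf("Undefined indirect IO read method %d\n",
               gctx->io_mode & 0x7F);
        return 0;
    }
    /* The real driver would now run the IIO byte-code program via
     * atom_iio_execute(gctx, gctx->iio[gctx->io_mode & 0x7F], ...);
     * this sketch only marks that step. */
    return 0;
}
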
/linux/crypto/
gcm.c 227 struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx; in gcm_hash_len() local
231 lengths.b = cpu_to_be64(gctx->cryptlen * 8); in gcm_hash_len()
244 struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx; in gcm_hash_len_continue() local
246 return gctx->complete(req, flags); in gcm_hash_len_continue()
288 struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx; in gcm_hash_crypt_continue() local
291 remain = gcm_remain(gctx->cryptlen); in gcm_hash_crypt_continue()
318 struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx; in gcm_hash_assoc_remain_continue() local
320 if (gctx->cryptlen) in gcm_hash_assoc_remain_continue()
322 gctx->src, gctx->cryptlen, flags) ?: in gcm_hash_assoc_remain_continue()
425 struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx; in gcm_encrypt_continue() local
[all …]
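
The crypto/gcm.c hits show the GHASH continuation chain: the per-request crypto_gcm_ghash_ctx (gctx) remembers cryptlen, the source scatterlist and the completion callback, and gcm_hash_len() finishes the hash with a block holding the bit lengths. Below is a minimal, standalone sketch of just that length block; the helper names are hypothetical and stand in for the kernel's cpu_to_be64()/u128 plumbing.

#include <stdint.h>

/* Write a 64-bit value big-endian; stands in for cpu_to_be64(). */
static void put_be64(uint8_t out[8], uint64_t v)
{
    for (int i = 0; i < 8; i++)
        out[i] = (uint8_t)(v >> (56 - 8 * i));
}

/* Build the final 16-byte block GHASH absorbs: the bit lengths of the
 * associated data and of the ciphertext, hence the "* 8" that also
 * appears in gcm_hash_len(). */
void gcm_build_len_block(uint8_t block[16],
                         uint64_t assoclen, uint64_t cryptlen)
{
    put_be64(block,     assoclen * 8);
    put_be64(block + 8, cryptlen * 8);
}
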
/linux/net/sunrpc/auth_gss/
gss_krb5_mech.c 523 static u32 gss_krb5_get_mic(struct gss_ctx *gctx, struct xdr_buf *text, in gss_krb5_get_mic() argument
526 struct krb5_ctx *kctx = gctx->internal_ctx_id; in gss_krb5_get_mic()
544 static u32 gss_krb5_verify_mic(struct gss_ctx *gctx, in gss_krb5_verify_mic() argument
548 struct krb5_ctx *kctx = gctx->internal_ctx_id; in gss_krb5_verify_mic()
565 static u32 gss_krb5_wrap(struct gss_ctx *gctx, int offset, in gss_krb5_wrap() argument
568 struct krb5_ctx *kctx = gctx->internal_ctx_id; in gss_krb5_wrap()
587 static u32 gss_krb5_unwrap(struct gss_ctx *gctx, int offset, in gss_krb5_unwrap() argument
590 struct krb5_ctx *kctx = gctx->internal_ctx_id; in gss_krb5_unwrap()
593 &gctx->slack, &gctx->align); in gss_krb5_unwrap()
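
In the SUNRPC GSS code, gctx is the mechanism-independent struct gss_ctx, and every Kerberos entry point (get_mic, verify_mic, wrap, unwrap) starts by downcasting gctx->internal_ctx_id to the mechanism-private struct krb5_ctx. The sketch below illustrates that dispatch idiom with hypothetical simplified types in place of the real SUNRPC structures.

#include <stdint.h>

/* Hypothetical stand-ins: the generic context only carries an opaque
 * pointer to mechanism-private state. */
struct krb5_ctx_sketch {
    int initiate;               /* placeholder for real krb5_ctx state */
};

struct gss_ctx_sketch {
    void *internal_ctx_id;      /* set by the mechanism at import time */
};

/* Mirrors the first statement of gss_krb5_get_mic() and friends:
 * recover the krb5-specific context before doing any real work. */
uint32_t gss_krb5_op_sketch(struct gss_ctx_sketch *gctx)
{
    struct krb5_ctx_sketch *kctx = gctx->internal_ctx_id;

    if (!kctx)
        return 1;               /* stand-in for a GSS failure code */

    /* ... per-enctype MIC/wrap work would use kctx here ... */
    return 0;                   /* stand-in for GSS_S_COMPLETE */
}
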
/linux/drivers/accel/ivpu/
ivpu_fw.c 309 fw->mem = ivpu_bo_create(vdev, &vdev->gctx, &fw_range, fw->runtime_size, in ivpu_fw_mem_init()
316 ret = ivpu_mmu_context_set_pages_ro(vdev, &vdev->gctx, fw->read_only_addr, in ivpu_fw_mem_init()
345 fw->mem_shave_nn = ivpu_bo_create(vdev, &vdev->gctx, &vdev->hw->ranges.shave, in ivpu_fw_mem_init()
ivpu_mmu_context.c 596 ivpu_mmu_context_init(vdev, &vdev->gctx, IVPU_GLOBAL_CONTEXT_MMU_SSID); in ivpu_mmu_global_context_init()
601 ivpu_mmu_context_fini(vdev, &vdev->gctx); in ivpu_mmu_global_context_fini()
ivpu_gem.c 338 return ivpu_bo_create(vdev, &vdev->gctx, &vdev->hw->ranges.global, size, flags); in ivpu_bo_create_global()
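
In the ivpu driver, vdev->gctx is the device-global MMU context: ivpu_mmu_global_context_init() binds it to IVPU_GLOBAL_CONTEXT_MMU_SSID, firmware and global buffer objects are created against it via ivpu_bo_create(), and ivpu_mmu_global_context_fini() tears it down. The following is a rough lifecycle sketch with hypothetical minimal types; it is not the driver's code.

/* Hypothetical minimal stand-ins for the ivpu structures. */
struct mmu_context_sketch {
    int ssid;                           /* stream ID bound to the context */
};

struct vpu_device_sketch {
    struct mmu_context_sketch gctx;     /* device-global MMU context */
};

#define GLOBAL_SSID_SKETCH 0            /* stand-in for IVPU_GLOBAL_CONTEXT_MMU_SSID */

/* Mirrors the role of ivpu_mmu_global_context_init(): bind the global context. */
void global_context_init_sketch(struct vpu_device_sketch *vdev)
{
    vdev->gctx.ssid = GLOBAL_SSID_SKETCH;
}

/* Firmware and global buffers would be allocated against &vdev->gctx here,
 * the role ivpu_bo_create() plays in ivpu_fw.c and ivpu_gem.c. */

/* Mirrors the role of ivpu_mmu_global_context_fini(): release the global context. */
void global_context_fini_sketch(struct vpu_device_sketch *vdev)
{
    vdev->gctx.ssid = -1;
}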