/linux/lib/zstd/

zstd_compress_module.c
    27  static size_t zstd_cctx_init(zstd_cctx *cctx, const zstd_parameters *parameters,   in zstd_cctx_init() argument
    31  cctx, ZSTD_reset_session_and_parameters));   in zstd_cctx_init()
    33  cctx, pledged_src_size));   in zstd_cctx_init()
    35  cctx, ZSTD_c_windowLog, parameters->cParams.windowLog));   in zstd_cctx_init()
    37  cctx, ZSTD_c_hashLog, parameters->cParams.hashLog));   in zstd_cctx_init()
    39  cctx, ZSTD_c_chainLog, parameters->cParams.chainLog));   in zstd_cctx_init()
    41  cctx, ZSTD_c_searchLog, parameters->cParams.searchLog));   in zstd_cctx_init()
    43  cctx, ZSTD_c_minMatch, parameters->cParams.minMatch));   in zstd_cctx_init()
    45  cctx, ZSTD_c_targetLength, parameters->cParams.targetLength));   in zstd_cctx_init()
    47  cctx, ZSTD_c_strategy, parameters->cParams.strategy));   in zstd_cctx_init()
    [all …]
/linux/tools/testing/selftests/bpf/progs/

crypto_basic.c
    21  struct bpf_crypto_ctx *cctx;   in crypto_release() local
    26  cctx = bpf_crypto_ctx_create(&params, sizeof(params), &err);   in crypto_release()
    28  if (!cctx) {   in crypto_release()
    33  bpf_crypto_ctx_release(cctx);   in crypto_release()
    47  struct bpf_crypto_ctx *cctx;   in crypto_acquire() local
    52  cctx = bpf_crypto_ctx_create(&params, sizeof(params), &err);   in crypto_acquire()
    54  if (!cctx) {   in crypto_acquire()
    59  cctx = bpf_crypto_ctx_acquire(cctx);   in crypto_acquire()
    60  if (!cctx)   in crypto_acquire()
    63  bpf_crypto_ctx_release(cctx);   in crypto_acquire()
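The crypto_basic.c hits above are the whole kfunc lifecycle: create a context, optionally take an extra reference with acquire, and release every reference before the program exits. A minimal sketch of that pattern follows; the kfunc prototypes, the syscall program type, and the empty parameter block are assumptions standing in for what the selftests pull from vmlinux.h and their shared headers.

    #include <vmlinux.h>
    #include <bpf/bpf_helpers.h>

    /* Assumed kfunc prototypes; normally provided by the selftests' headers. */
    struct bpf_crypto_ctx *bpf_crypto_ctx_create(const struct bpf_crypto_params *params,
                                                 u32 params__sz, int *err) __ksym;
    struct bpf_crypto_ctx *bpf_crypto_ctx_acquire(struct bpf_crypto_ctx *ctx) __ksym;
    void bpf_crypto_ctx_release(struct bpf_crypto_ctx *ctx) __ksym;

    SEC("syscall")
    int crypto_ctx_lifecycle(void *args)
    {
            struct bpf_crypto_params params = {};   /* algo/key fields elided */
            struct bpf_crypto_ctx *cctx, *extra;
            int err = 0;

            cctx = bpf_crypto_ctx_create(&params, sizeof(params), &err);
            if (!cctx)
                    return 0;                       /* err holds the failure reason */

            extra = bpf_crypto_ctx_acquire(cctx);   /* take a second reference */
            if (extra)
                    bpf_crypto_ctx_release(extra);  /* drop the extra reference */

            bpf_crypto_ctx_release(cctx);           /* drop the original reference */
            return 0;
    }

    char _license[] SEC("license") = "GPL";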
crypto_bench.c
    24  struct bpf_crypto_ctx *cctx;   in crypto_setup() local
    41  cctx = bpf_crypto_ctx_create(&params, sizeof(params), &err);   in crypto_setup()
    43  if (!cctx) {   in crypto_setup()
    48  err = crypto_ctx_insert(cctx);   in crypto_setup()
crypto_sanity.c
    61  struct bpf_crypto_ctx *cctx;   in skb_crypto_setup() local
    73  cctx = bpf_crypto_ctx_create(&params, sizeof(params), &err);   in skb_crypto_setup()
    75  if (!cctx) {   in skb_crypto_setup()
    80  err = crypto_ctx_insert(cctx);   in skb_crypto_setup()
/linux/lib/zstd/compress/

zstd_compress.c
    94  static void ZSTD_initCCtx(ZSTD_CCtx* cctx, ZSTD_customMem memManager)   in ZSTD_initCCtx() argument
    96  assert(cctx != NULL);   in ZSTD_initCCtx()
    97  ZSTD_memset(cctx, 0, sizeof(*cctx));   in ZSTD_initCCtx()
    98  cctx->customMem = memManager;   in ZSTD_initCCtx()
    99  cctx->bmi2 = ZSTD_cpuSupportsBmi2();   in ZSTD_initCCtx()
   100  { size_t const err = ZSTD_CCtx_reset(cctx, ZSTD_reset_parameters);   in ZSTD_initCCtx()
   111  { ZSTD_CCtx* const cctx = (ZSTD_CCtx*)ZSTD_customMalloc(sizeof(ZSTD_CCtx), customMem);   in ZSTD_createCCtx_advanced() local
   112  if (!cctx) return NULL;   in ZSTD_createCCtx_advanced()
   113  ZSTD_initCCtx(cctx, customMem);   in ZSTD_createCCtx_advanced()
   114  return cctx;   in ZSTD_createCCtx_advanced()
    [all …]
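ZSTD_createCCtx_advanced() above allocates the ZSTD_CCtx through a caller-supplied ZSTD_customMem and then runs ZSTD_initCCtx() on it. A minimal userspace sketch of calling that entry point, assuming the ZSTD_STATIC_LINKING_ONLY portion of <zstd.h> (where ZSTD_customMem and ZSTD_createCCtx_advanced are exposed); the allocator here just forwards to malloc/free.

    #define ZSTD_STATIC_LINKING_ONLY
    #include <zstd.h>
    #include <stdlib.h>

    /* Forwarding allocator; the opaque cookie is unused in this sketch. */
    static void *my_alloc(void *opaque, size_t size) { (void)opaque; return malloc(size); }
    static void  my_free(void *opaque, void *addr)   { (void)opaque; free(addr); }

    ZSTD_CCtx *make_cctx_with_custom_mem(void)
    {
            ZSTD_customMem const mem = { my_alloc, my_free, NULL };

            /* Returns NULL if the custom allocator fails. */
            return ZSTD_createCCtx_advanced(mem);
    }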
/linux/drivers/misc/

fastrpc.c
   253  struct fastrpc_channel_ctx *cctx;   member
   287  struct fastrpc_channel_ctx *cctx;   member
   298  struct fastrpc_channel_ctx *cctx;   member
   320  int vmid = map->fl->cctx->vmperms[0].vmid;   in fastrpc_free_map()
   456  struct device *rdev = &fl->cctx->rpdev->dev;   in fastrpc_remote_heap_alloc()
   463  struct fastrpc_channel_ctx *cctx;   in fastrpc_channel_ctx_free() local
   465  cctx = container_of(ref, struct fastrpc_channel_ctx, refcount);   in fastrpc_channel_ctx_free()
   467  kfree(cctx);   in fastrpc_channel_ctx_free()
   470  static void fastrpc_channel_ctx_get(struct fastrpc_channel_ctx *cctx)   in fastrpc_channel_ctx_get() argument
   472  kref_get(&cctx->refcount);   in fastrpc_channel_ctx_get()
    [all …]
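fastrpc_channel_ctx_get() and fastrpc_channel_ctx_free() above are the stock kref idiom: the release callback recovers the container with container_of() and frees it on the last put. A generic sketch of that pattern (the struct and function names here are illustrative, not the driver's own):

    #include <linux/kref.h>
    #include <linux/slab.h>

    struct chan_ctx {
            struct kref refcount;
            /* ... channel state ... */
    };

    static void chan_ctx_free(struct kref *ref)
    {
            struct chan_ctx *cctx = container_of(ref, struct chan_ctx, refcount);

            kfree(cctx);
    }

    static void chan_ctx_get(struct chan_ctx *cctx)
    {
            kref_get(&cctx->refcount);                  /* take another reference */
    }

    static void chan_ctx_put(struct chan_ctx *cctx)
    {
            kref_put(&cctx->refcount, chan_ctx_free);   /* frees on the last put */
    }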
/linux/drivers/gpu/drm/nouveau/nvkm/engine/fifo/

chan.c
    41  nvkm_chan_cctx_bind(struct nvkm_chan *chan, struct nvkm_engn *engn, struct nvkm_cctx *cctx)   in nvkm_chan_cctx_bind() argument
    50  CHAN_TRACE(chan, "%sbind cctx %d[%s]", cctx ? "" : "un", engn->id, engine->subdev.name);   in nvkm_chan_cctx_bind()
    62  engn->func->bind(engn, cctx, chan);   in nvkm_chan_cctx_bind()
    74  struct nvkm_cctx *cctx = *pcctx;   in nvkm_chan_cctx_put() local
    76  if (cctx) {   in nvkm_chan_cctx_put()
    77  struct nvkm_engn *engn = cctx->vctx->ectx->engn;   in nvkm_chan_cctx_put()
    79  if (refcount_dec_and_mutex_lock(&cctx->refs, &chan->cgrp->mutex)) {   in nvkm_chan_cctx_put()
    81  nvkm_cgrp_vctx_put(chan->cgrp, &cctx->vctx);   in nvkm_chan_cctx_put()
    82  list_del(&cctx->head);   in nvkm_chan_cctx_put()
    83  kfree(cctx);   in nvkm_chan_cctx_put()
    [all …]
uchan.c
    70  struct nvkm_cctx *cctx;   member
    79  struct nvkm_cctx *cctx = uobj->cctx;   in nvkm_uchan_object_fini_1() local
    80  struct nvkm_ectx *ectx = cctx->vctx->ectx;   in nvkm_uchan_object_fini_1()
    86  if (refcount_dec_and_mutex_lock(&cctx->uses, &chan->cgrp->mutex)) {   in nvkm_uchan_object_fini_1()
   102  struct nvkm_cctx *cctx = uobj->cctx;   in nvkm_uchan_object_init_0() local
   103  struct nvkm_ectx *ectx = cctx->vctx->ectx;   in nvkm_uchan_object_init_0()
   110  if (!refcount_inc_not_zero(&cctx->uses)) {   in nvkm_uchan_object_init_0()
   112  if (!refcount_inc_not_zero(&cctx->uses)) {   in nvkm_uchan_object_init_0()
   120  nvkm_chan_cctx_bind(chan, ectx->engn, cctx);   in nvkm_uchan_object_init_0()
   121  refcount_set(&cctx->uses, 1);   in nvkm_uchan_object_init_0()
    [all …]
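The init path above and the put path in chan.c pair into a double-checked refcount idiom: the fast path takes a use with refcount_inc_not_zero(), the slow path rechecks under the cgrp mutex, binds the context, and publishes it with refcount_set(..., 1); teardown runs only when refcount_dec_and_mutex_lock() drops the final use. A stripped-down sketch with placeholder types, not nouveau's own structures:

    #include <linux/mutex.h>
    #include <linux/refcount.h>

    struct eng_ctx {
            refcount_t uses;
            /* ... hardware binding state ... */
    };

    static void eng_ctx_bind(struct eng_ctx *c)   { /* engine bind elided */ }
    static void eng_ctx_unbind(struct eng_ctx *c) { /* engine unbind elided */ }

    static void eng_ctx_use_get(struct eng_ctx *c, struct mutex *lock)
    {
            if (refcount_inc_not_zero(&c->uses))
                    return;                           /* already bound: fast path */

            mutex_lock(lock);
            if (!refcount_inc_not_zero(&c->uses)) {   /* recheck under the lock */
                    eng_ctx_bind(c);                  /* first user binds the context */
                    refcount_set(&c->uses, 1);
            }
            mutex_unlock(lock);
    }

    static void eng_ctx_use_put(struct eng_ctx *c, struct mutex *lock)
    {
            /* Returns true with the mutex held only when the last use is dropped. */
            if (refcount_dec_and_mutex_lock(&c->uses, lock)) {
                    eng_ctx_unbind(c);
                    mutex_unlock(lock);
            }
    }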
gv100.c
    92  gv100_ectx_bind(struct nvkm_engn *engn, struct nvkm_cctx *cctx, struct nvkm_chan *chan)   in gv100_ectx_bind() argument
    96  if (cctx) {   in gv100_ectx_bind()
    97  addr = cctx->vctx->vma->addr;   in gv100_ectx_bind()
   104  nvkm_mo32(chan->inst, 0x0ac, 0x00010000, cctx ? 0x00010000 : 0x00000000);   in gv100_ectx_bind()
   117  gv100_ectx_ce_bind(struct nvkm_engn *engn, struct nvkm_cctx *cctx, struct nvkm_chan *chan)   in gv100_ectx_ce_bind() argument
   119  const u64 bar2 = cctx ? nvkm_memory_bar2(cctx->vctx->inst->memory) : 0ULL;   in gv100_ectx_ce_bind()
   124  nvkm_mo32(chan->inst, 0x0ac, 0x00020000, cctx ? 0x00020000 : 0x00000000);   in gv100_ectx_ce_bind()
g84.c
   106  g84_ectx_bind(struct nvkm_engn *engn, struct nvkm_cctx *cctx, struct nvkm_chan *chan)   in g84_ectx_bind() argument
   129  if (!cctx) {   in g84_ectx_bind()
   139  start = cctx->vctx->inst->addr;   in g84_ectx_bind()
   140  limit = start + cctx->vctx->inst->size - 1;   in g84_ectx_bind()
nv40.c
   125  nv40_ectx_bind(struct nvkm_engn *engn, struct nvkm_cctx *cctx, struct nvkm_chan *chan)   in nv40_ectx_bind() argument
   149  if (cctx)   in nv40_ectx_bind()
   150  inst = cctx->vctx->inst->addr >> 4;   in nv40_ectx_bind()
nv50.c
   150  nv50_ectx_bind(struct nvkm_engn *engn, struct nvkm_cctx *cctx, struct nvkm_chan *chan)   in nv50_ectx_bind() argument
   165  if (!cctx) {   in nv50_ectx_bind()
   189  start = cctx->vctx->inst->addr;   in nv50_ectx_bind()
   190  limit = start + cctx->vctx->inst->size - 1;   in nv50_ectx_bind()
gk104.c
   134  gk104_ectx_bind(struct nvkm_engn *engn, struct nvkm_cctx *cctx, struct nvkm_chan *chan)   in gk104_ectx_bind() argument
   162  if (cctx) {   in gk104_ectx_bind()
   163  addr = cctx->vctx->vma->addr;   in gk104_ectx_bind()
/linux/include/linux/

zstd_lib.h
   195  ZSTDLIB_API size_t ZSTD_freeCCtx(ZSTD_CCtx* cctx);  /* accept NULL pointer */
   205  ZSTDLIB_API size_t ZSTD_compressCCtx(ZSTD_CCtx* cctx,
   462  ZSTDLIB_API size_t ZSTD_CCtx_setParameter(ZSTD_CCtx* cctx, ZSTD_cParameter param, int value);
   479  ZSTDLIB_API size_t ZSTD_CCtx_setPledgedSrcSize(ZSTD_CCtx* cctx, unsigned long long pledgedSrcSize);
   501  ZSTDLIB_API size_t ZSTD_CCtx_reset(ZSTD_CCtx* cctx, ZSTD_ResetDirective reset);
   513  ZSTDLIB_API size_t ZSTD_compress2( ZSTD_CCtx* cctx,
   704  ZSTDLIB_API size_t ZSTD_compressStream2( ZSTD_CCtx* cctx,
   862  ZSTDLIB_API size_t ZSTD_compress_usingCDict(ZSTD_CCtx* cctx,
   951  ZSTDLIB_API size_t ZSTD_CCtx_loadDictionary(ZSTD_CCtx* cctx, const void* dict, size_t dictSize);
   965  ZSTDLIB_API size_t ZSTD_CCtx_refCDict(ZSTD_CCtx* cctx, const ZSTD_CDict* cdict);
    [all …]
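The prototypes above are the advanced one-context API: configure a ZSTD_CCtx with ZSTD_CCtx_setParameter() and ZSTD_CCtx_setPledgedSrcSize(), then compress with ZSTD_compress2(). A minimal userspace sketch of that flow (the level and checksum choices are arbitrary); callers check the result with ZSTD_isError().

    #include <zstd.h>

    size_t compress_once(void *dst, size_t dst_cap, const void *src, size_t src_size)
    {
            ZSTD_CCtx *cctx = ZSTD_createCCtx();
            size_t ret = (size_t)-1;                /* generic error code if allocation fails */

            if (!cctx)
                    return ret;

            ZSTD_CCtx_reset(cctx, ZSTD_reset_session_and_parameters);
            ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, 19);
            ZSTD_CCtx_setParameter(cctx, ZSTD_c_checksumFlag, 1);
            ZSTD_CCtx_setPledgedSrcSize(cctx, src_size);

            ret = ZSTD_compress2(cctx, dst, dst_cap, src, src_size);
            ZSTD_freeCCtx(cctx);                    /* accepts NULL, frees the context */
            return ret;                             /* compressed size or error code */
    }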
zstd.h
   217  size_t zstd_compress_cctx(zstd_cctx *cctx, void *dst, size_t dst_capacity,
   234  size_t zstd_free_cctx(zstd_cctx* cctx);
   281  size_t zstd_compress_using_cdict(zstd_cctx *cctx, void *dst,
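The kernel wrappers above mirror the advanced API but operate on a caller-provided workspace instead of heap allocation. A sketch of one-shot compression with them; zstd_get_params(), zstd_cctx_workspace_bound() and zstd_is_error() are quoted from memory of <linux/zstd.h>, so treat those exact names and signatures as assumptions.

    #include <linux/vmalloc.h>
    #include <linux/zstd.h>

    /* Returns the compressed size, or 0 on any failure. */
    static size_t zstd_compress_buf(void *dst, size_t dst_cap,
                                    const void *src, size_t src_len)
    {
            zstd_parameters params = zstd_get_params(3, src_len);   /* level 3 */
            size_t wksp_sz = zstd_cctx_workspace_bound(&params.cParams);
            void *wksp = vzalloc(wksp_sz);
            zstd_cctx *cctx;
            size_t ret = 0;

            if (!wksp)
                    return 0;

            cctx = zstd_init_cctx(wksp, wksp_sz);   /* context lives inside wksp */
            if (cctx)
                    ret = zstd_compress_cctx(cctx, dst, dst_cap, src, src_len, &params);

            vfree(wksp);
            return zstd_is_error(ret) ? 0 : ret;
    }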
/linux/drivers/infiniband/hw/bnxt_re/

qplib_res.h
   304  struct bnxt_qplib_chip_ctx *cctx;   member
   321  static inline bool bnxt_qplib_is_chip_gen_p7(struct bnxt_qplib_chip_ctx *cctx)   in bnxt_qplib_is_chip_gen_p7() argument
   323  return (cctx->chip_num == CHIP_NUM_58818 ||   in bnxt_qplib_is_chip_gen_p7()
   324  cctx->chip_num == CHIP_NUM_57608);   in bnxt_qplib_is_chip_gen_p7()
   327  static inline bool bnxt_qplib_is_chip_gen_p5(struct bnxt_qplib_chip_ctx *cctx)   in bnxt_qplib_is_chip_gen_p5() argument
   329  return (cctx->chip_num == CHIP_NUM_57508 ||   in bnxt_qplib_is_chip_gen_p5()
   330  cctx->chip_num == CHIP_NUM_57504 ||   in bnxt_qplib_is_chip_gen_p5()
   331  cctx->chip_num == CHIP_NUM_57502);   in bnxt_qplib_is_chip_gen_p5()
   334  static inline bool bnxt_qplib_is_chip_gen_p5_p7(struct bnxt_qplib_chip_ctx *cctx)   in bnxt_qplib_is_chip_gen_p5_p7() argument
   336  return bnxt_qplib_is_chip_gen_p5(cctx) || bnxt_qplib_is_chip_gen_p7(cctx);   in bnxt_qplib_is_chip_gen_p5_p7()
    [all …]
main.c
    90  struct bnxt_qplib_chip_ctx *cctx;   in bnxt_re_set_db_offset() local
   100  cctx = rdev->chip_ctx;   in bnxt_re_set_db_offset()
   108  if (bnxt_qplib_is_chip_gen_p7(cctx)) {   in bnxt_re_set_db_offset()
   123  if (cctx->modes.db_push && l2db_len && en_dev->l2_db_size != barlen) {   in bnxt_re_set_db_offset()
   131  struct bnxt_qplib_chip_ctx *cctx;   in bnxt_re_set_drv_mode() local
   133  cctx = rdev->chip_ctx;   in bnxt_re_set_drv_mode()
   134  cctx->modes.wqe_mode = bnxt_qplib_is_chip_gen_p7(rdev->chip_ctx) ?   in bnxt_re_set_drv_mode()
   140  cctx->modes.toggle_bits |= BNXT_QPLIB_CQ_TOGGLE_BIT;   in bnxt_re_set_drv_mode()
   141  cctx->modes.toggle_bits |= BNXT_QPLIB_SRQ_TOGGLE_BIT;   in bnxt_re_set_drv_mode()
   158  rdev->qplib_res.cctx = NULL;   in bnxt_re_destroy_chip_ctx()
    [all …]
qplib_sp.c
    62  if (!bnxt_qplib_is_chip_gen_p5_p7(rcfw->res->cctx))   in bnxt_qplib_is_atomic_cap()
    98  struct bnxt_qplib_chip_ctx *cctx;   in bnxt_qplib_get_dev_attr() local
   104  cctx = rcfw->res->cctx;   in bnxt_qplib_get_dev_attr()
   133  if (!bnxt_qplib_is_chip_gen_p5_p7(rcfw->res->cctx)) {   in bnxt_qplib_get_dev_attr()
   142  if (cctx->modes.wqe_mode == BNXT_QPLIB_WQE_MODE_VARIABLE)   in bnxt_qplib_get_dev_attr()
   145  attr->max_qp_sges = cctx->modes.wqe_mode == BNXT_QPLIB_WQE_MODE_VARIABLE ?   in bnxt_qplib_get_dev_attr()
   149  if (!bnxt_qplib_is_chip_gen_p7(rcfw->res->cctx))   in bnxt_qplib_get_dev_attr()
   165  if (!bnxt_qplib_is_chip_gen_p7(rcfw->res->cctx))   in bnxt_qplib_get_dev_attr()
   190  if (rcfw->res->cctx->hwrm_intf_ver >= HWRM_VERSION_DEV_ATTR_MAX_DPI)   in bnxt_qplib_get_dev_attr()
   965  if (bnxt_qplib_is_chip_gen_p5_p7(res->cctx)) {   in bnxt_qplib_modify_cc()
    [all …]
qplib_rcfw.c
   784  rcfw->res->cctx, true);   in bnxt_qplib_service_creq()
   854  if (is_virtfn || bnxt_qplib_is_chip_gen_p5_p7(rcfw->res->cctx))   in bnxt_qplib_init_rcfw()
   980  rcfw->max_timeout = res->cctx->hwrm_cmd_max_timeout;   in bnxt_qplib_alloc_rcfw_channel()
  1000  bnxt_qplib_ring_nq_db(&creq->creq_db.dbinfo, rcfw->res->cctx, false);   in bnxt_qplib_rcfw_stop_irq()
  1064  bnxt_qplib_ring_nq_db(&creq->creq_db.dbinfo, res->cctx, true);   in bnxt_qplib_rcfw_start_irq()
/linux/drivers/block/zram/

backend_deflate.c
    15  struct z_stream_s cctx;   member
    38  if (zctx->cctx.workspace) {   in deflate_destroy()
    39  zlib_deflateEnd(&zctx->cctx);   in deflate_destroy()
    40  vfree(zctx->cctx.workspace);   in deflate_destroy()
    61  zctx->cctx.workspace = vzalloc(sz);   in deflate_create()
    62  if (!zctx->cctx.workspace)   in deflate_create()
    65  ret = zlib_deflateInit2(&zctx->cctx, params->level, Z_DEFLATED,   in deflate_create()
    94  deflate = &zctx->cctx;   in deflate_compress()
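backend_deflate.c above keeps a struct z_stream_s as its compression context: a vzalloc()'d workspace plus zlib_deflateInit2() on create, zlib_deflateEnd() and vfree() on destroy. A reduced sketch of that setup/teardown against <linux/zlib.h>; sizing the workspace with zlib_deflate_workspacesize() and using a raw-deflate window (-MAX_WBITS) are assumptions here, not necessarily the backend's exact choices.

    #include <linux/errno.h>
    #include <linux/vmalloc.h>
    #include <linux/zlib.h>

    static int deflate_ctx_init(struct z_stream_s *cctx, int level)
    {
            /* Workspace sized for the largest window/memLevel combination. */
            const int sz = zlib_deflate_workspacesize(MAX_WBITS, MAX_MEM_LEVEL);

            cctx->workspace = vzalloc(sz);
            if (!cctx->workspace)
                    return -ENOMEM;

            if (zlib_deflateInit2(cctx, level, Z_DEFLATED, -MAX_WBITS,
                                  MAX_MEM_LEVEL, Z_DEFAULT_STRATEGY) != Z_OK) {
                    vfree(cctx->workspace);
                    cctx->workspace = NULL;
                    return -EINVAL;
            }
            return 0;
    }

    static void deflate_ctx_destroy(struct z_stream_s *cctx)
    {
            if (cctx->workspace) {
                    zlib_deflateEnd(cctx);
                    vfree(cctx->workspace);
                    cctx->workspace = NULL;
            }
    }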
backend_zstd.c
    11  zstd_cctx *cctx;   member
   118  zstd_free_cctx(zctx->cctx);   in zstd_destroy()
   146  zctx->cctx = zstd_init_cctx(zctx->cctx_mem, sz);   in zstd_create()
   147  if (!zctx->cctx)   in zstd_create()
   161  zctx->cctx = zstd_create_cctx_advanced(zp->custom_mem);   in zstd_create()
   162  if (!zctx->cctx)   in zstd_create()
   186  ret = zstd_compress_cctx(zctx->cctx, req->dst, req->dst_len,   in zstd_compress()
   189  ret = zstd_compress_using_cdict(zctx->cctx, req->dst,   in zstd_compress()
/linux/drivers/pinctrl/intel/

pinctrl-cherryview.c
   756  struct intel_community_context *cctx = &pctrl->context.communities[0];   in chv_gpio_request_enable() local
   760  for (i = 0; i < ARRAY_SIZE(cctx->intr_lines); i++) {   in chv_gpio_request_enable()
   761  if (cctx->intr_lines[i] == offset) {   in chv_gpio_request_enable()
   762  cctx->intr_lines[i] = CHV_INVALID_HWIRQ;   in chv_gpio_request_enable()
  1248  struct intel_community_context *cctx = &pctrl->context.communities[0];   in chv_gpio_irq_startup() local
  1263  if (cctx->intr_lines[intsel] == CHV_INVALID_HWIRQ) {   in chv_gpio_irq_startup()
  1267  cctx->intr_lines[intsel] = hwirq;   in chv_gpio_irq_startup()
  1278  struct intel_community_context *cctx = &pctrl->context.communities[0];   in chv_gpio_set_intr_line() local
  1286  if (cctx->intr_lines[intsel] == pin)   in chv_gpio_set_intr_line()
  1289  if (cctx->intr_lines[intsel] == CHV_INVALID_HWIRQ) {   in chv_gpio_set_intr_line()
    [all …]
/linux/crypto/

zstd.c
    21  zstd_cctx *cctx;   member
    44  ctx->cctx = zstd_init_cctx(ctx->cwksp, wksp_size);   in zstd_comp_init()
    45  if (!ctx->cctx) {   in zstd_comp_init()
    83  ctx->cctx = NULL;   in zstd_comp_exit()
   157  out_len = zstd_compress_cctx(zctx->cctx, dst, *dlen, src, slen, &params);   in __zstd_compress()
camellia_generic.c
   963  struct camellia_ctx *cctx = crypto_tfm_ctx(tfm);   in camellia_set_key() local
   969  cctx->key_length = key_len;   in camellia_set_key()
   973  camellia_setup128(key, cctx->key_table);   in camellia_set_key()
   976  camellia_setup192(key, cctx->key_table);   in camellia_set_key()
   979  camellia_setup256(key, cctx->key_table);   in camellia_set_key()
   988  const struct camellia_ctx *cctx = crypto_tfm_ctx(tfm);   in camellia_encrypt() local
   998  if (cctx->key_length == 16)   in camellia_encrypt()
  1003  camellia_do_encrypt(cctx->key_table, tmp, max);   in camellia_encrypt()
  1014  const struct camellia_ctx *cctx = crypto_tfm_ctx(tfm);   in camellia_decrypt() local
  1024  if (cctx->key_length == 16)   in camellia_decrypt()
    [all …]
/linux/arch/x86/crypto/

camellia.h
    22  extern int __camellia_setkey(struct camellia_ctx *cctx,