/linux/drivers/gpu/drm/nouveau/nvkm/engine/gr/

ctxnv50.c
  170  static void nv50_gr_construct_mmio(struct nvkm_grctx *ctx);
  171  static void nv50_gr_construct_xfer1(struct nvkm_grctx *ctx);
  172  static void nv50_gr_construct_xfer2(struct nvkm_grctx *ctx);
  177  nv50_grctx_generate(struct nvkm_grctx *ctx)
  179          cp_set (ctx, STATE, RUNNING);
  180          cp_set (ctx, XFER_SWITCH, ENABLE);
  182          cp_bra (ctx, AUTO_SAVE, PENDING, cp_setup_save);
  183          cp_bra (ctx, USER_SAVE, PENDING, cp_setup_save);
  185          cp_name(ctx, cp_check_load);
  186          cp_bra (ctx, AUTO_LOAD, PENDING, cp_setup_auto_load);
  [all …]
|
ctxnv40.c
  159  nv40_gr_construct_general(struct nvkm_grctx *ctx)
  161          struct nvkm_device *device = ctx->device;
  164          cp_ctx(ctx, 0x4000a4, 1);
  165          gr_def(ctx, 0x4000a4, 0x00000008);
  166          cp_ctx(ctx, 0x400144, 58);
  167          gr_def(ctx, 0x400144, 0x00000001);
  168          cp_ctx(ctx, 0x400314, 1);
  169          gr_def(ctx, 0x400314, 0x00000000);
  170          cp_ctx(ctx, 0x400400, 10);
  171          cp_ctx(ctx, 0x400480, 10);
  [all …]
|
/linux/drivers/gpu/drm/panel/

panel-boe-tv101wum-nl6.c
   59  #define nt36523_switch_page(ctx, page) \
   60          mipi_dsi_dcs_write_seq_multi(ctx, NT36523_DCS_SWITCH_PAGE, (page))
   62  static void nt36523_enable_reload_cmds(struct mipi_dsi_multi_context *ctx)
   64          mipi_dsi_dcs_write_seq_multi(ctx, 0xfb, 0x01);
   69          struct mipi_dsi_multi_context ctx = { .dsi = boe->dsi };  /* in boe_tv110c9m_init() */
   71          nt36523_switch_page(&ctx, 0x20);
   72          nt36523_enable_reload_cmds(&ctx);
   73          mipi_dsi_dcs_write_seq_multi(&ctx, 0x05, 0xd9);
   74          mipi_dsi_dcs_write_seq_multi(&ctx, 0x07, 0x78);
   75          mipi_dsi_dcs_write_seq_multi(&ctx, 0x08, 0x5a);
  [all …]
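
The snippet above shows the mipi_dsi_multi_context idiom: each *_multi helper is skipped once an earlier call in the sequence has failed, and the first error is latched in the context, so long DCS init sequences need no per-call error checks. A minimal sketch of a panel init routine built on that idiom; my_panel and my_panel_init are illustrative names, not this driver's, and accum_err is assumed to be the accumulated-error member of struct mipi_dsi_multi_context:

    #include <drm/drm_mipi_dsi.h>

    struct my_panel {
            struct mipi_dsi_device *dsi;
    };

    static int my_panel_init(struct my_panel *panel)
    {
            struct mipi_dsi_multi_context ctx = { .dsi = panel->dsi };

            /* Each helper becomes a no-op once an error has been recorded. */
            mipi_dsi_dcs_write_seq_multi(&ctx, 0xfb, 0x01);
            mipi_dsi_dcs_write_seq_multi(&ctx, 0x05, 0xd9);
            mipi_dsi_dcs_write_seq_multi(&ctx, 0x07, 0x78);

            return ctx.accum_err;   /* first error seen, or 0 on success */
    }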
|
/linux/tools/testing/selftests/bpf/progs/

test_tcp_custom_syncookie.c
   73  static int tcp_load_headers(struct tcp_syncookie *ctx)
   75          ctx->data = (void *)(long)ctx->skb->data;
   76          ctx->data_end = (void *)(long)ctx->skb->data_end;
   77          ctx->eth = (struct ethhdr *)(long)ctx->skb->data;
   79          if (ctx->eth + 1 > ctx->data_end)
   82          switch (bpf_ntohs(ctx …
  126  tcp_reload_headers(struct tcp_syncookie *ctx)
  161  tcp_v4_csum(struct tcp_syncookie *ctx, __wsum csum)
  167  tcp_v6_csum(struct tcp_syncookie *ctx, __wsum csum)
  173  tcp_validate_header(struct tcp_syncookie *ctx)
  202  next(struct tcp_syncookie *ctx, __u32 sz)
  220  tcp_parse_option(__u32 index, struct tcp_syncookie *ctx)
  284  tcp_parse_options(struct tcp_syncookie *ctx)
  291  tcp_validate_sysctl(struct tcp_syncookie *ctx)
  314  tcp_prepare_cookie(struct tcp_syncookie *ctx)
  364  tcp_write_options(struct tcp_syncookie *ctx)
  399  tcp_handle_syn(struct tcp_syncookie *ctx)
  462  tcp_validate_cookie(struct tcp_syncookie *ctx)
  505  tcp_handle_ack(struct tcp_syncookie *ctx)
  565          struct tcp_syncookie ctx = {  /* in tcp_custom_syncookie() */
  [all …]
test_sk_lookup.c
   76  int lookup_pass(struct bpf_sk_lookup *ctx)
   82  int lookup_drop(struct bpf_sk_lookup *ctx)
   88  int check_ifindex(struct bpf_sk_lookup *ctx)
   90          if (ctx->ingress_ifindex == 1)
   96  int reuseport_pass(struct sk_reuseport_md *ctx)
  102  int reuseport_drop(struct sk_reuseport_md *ctx)
  109  int redir_port(struct bpf_sk_lookup *ctx)
  114          if (ctx->local_port != DST_PORT)
  121          err = bpf_sk_assign(ctx, sk, 0);
  128  int redir_ip4(struct bpf_sk_lookup *ctx)
  [all …]
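
The redir_port() lines sketch the sk_lookup redirect flow: match on ctx->local_port, look up a listening socket, and steer the lookup with bpf_sk_assign(). Below is a minimal standalone version of that flow under the assumption of a one-slot SOCKMAP; redir_map, the key, and the DST_PORT value are illustrative, not the selftest's actual layout:

    #include <linux/bpf.h>
    #include <bpf/bpf_helpers.h>

    #define DST_PORT 7007   /* assumed example port */

    struct {
            __uint(type, BPF_MAP_TYPE_SOCKMAP);
            __uint(max_entries, 1);
            __type(key, __u32);
            __type(value, __u64);
    } redir_map SEC(".maps");

    SEC("sk_lookup")
    int redir_port_sketch(struct bpf_sk_lookup *ctx)
    {
            struct bpf_sock *sk;
            __u32 key = 0;
            long err;

            if (ctx->local_port != DST_PORT)
                    return SK_PASS;              /* not our port: fall through */

            sk = bpf_map_lookup_elem(&redir_map, &key);
            if (!sk)
                    return SK_DROP;

            err = bpf_sk_assign(ctx, sk, 0);     /* steer the lookup to this socket */
            bpf_sk_release(sk);
            return err ? SK_DROP : SK_PASS;
    }

    char _license[] SEC("license") = "GPL";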
|
/linux/arch/powerpc/platforms/cell/spufs/

context.c
   27          struct spu_context *ctx;  /* in alloc_spu_context() */
   29          ctx = kzalloc(sizeof *ctx, GFP_KERNEL);
   30          if (!ctx)
   35          if (spu_init_csa(&ctx->csa))
   37          spin_lock_init(&ctx->mmio_lock);
   38          mutex_init(&ctx->mapping_lock);
   39          kref_init(&ctx->kref);
   40          mutex_init(&ctx->state_mutex);
   41          mutex_init(&ctx->run_mutex);
   42          init_waitqueue_head(&ctx->ibox_wq);
  [all …]
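
kref_init() on line 39 starts the context's reference count at one; other spufs paths then take and drop references with kref_get()/kref_put(). A generic sketch of that lifecycle, with illustrative names (my_ctx, my_ctx_release) rather than the spufs ones:

    #include <linux/kref.h>
    #include <linux/slab.h>

    struct my_ctx {
            struct kref kref;
            /* ... payload ... */
    };

    static void my_ctx_release(struct kref *kref)
    {
            struct my_ctx *ctx = container_of(kref, struct my_ctx, kref);

            kfree(ctx);
    }

    static struct my_ctx *my_ctx_alloc(void)
    {
            struct my_ctx *ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);

            if (!ctx)
                    return NULL;
            kref_init(&ctx->kref);          /* refcount starts at 1 */
            return ctx;
    }

    /* Callers pair a get with a put around each use. */
    static void my_ctx_get(struct my_ctx *ctx) { kref_get(&ctx->kref); }
    static void my_ctx_put(struct my_ctx *ctx) { kref_put(&ctx->kref, my_ctx_release); }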
|
run.c
   17          struct spu_context *ctx = spu->ctx;  /* in spufs_stop_callback() */
   26          if (ctx) {
   30                  ctx->csa.class_0_pending = spu->class_0_pending;
   31                  ctx->csa.class_0_dar = spu->class_0_dar;
   34                  ctx->csa.class_1_dsisr = spu->class_1_dsisr;
   35                  ctx->csa.class_1_dar = spu->class_1_dar;
   45                  wake_up_all(&ctx->stop_wq);
   49  int spu_stopped(struct spu_context *ctx, u32 *stat)
   58          *stat = ctx->ops->status_read(ctx);
   69          if (test_bit(SPU_SCHED_NOTIFY_ACTIVE, &ctx->sched_flags))
  [all …]
|
backing_ops.c
   34  static void gen_spu_event(struct spu_context *ctx, u32 event)
   40          ch0_cnt = ctx->csa.spu_chnlcnt_RW[0];
   41          ch0_data = ctx->csa.spu_chnldata_RW[0];
   42          ch1_data = ctx->csa.spu_chnldata_RW[1];
   43          ctx->csa.spu_chnldata_RW[0] |= event;
   45          ctx->csa.spu_chnlcnt_RW[0] = 1;
   49  static int spu_backing_mbox_read(struct spu_context *ctx, u32 *data)
   54          spin_lock(&ctx->csa.register_lock);
   55          mbox_stat = ctx->csa.prob.mb_stat_R;
   61          *data = ctx->csa.prob.pu_mb_R;
  [all …]
|
file.c
  160          struct spu_context *ctx = i->i_ctx;  /* in spufs_mem_open() */
  162          mutex_lock(&ctx->mapping_lock);
  163          file->private_data = ctx;
  165          ctx->local_store = inode->i_mapping;
  166          mutex_unlock(&ctx->mapping_lock);
  174          struct spu_context *ctx = i->i_ctx;  /* in spufs_mem_release() */
  176          mutex_lock(&ctx->mapping_lock);
  178          ctx->local_store = NULL;
  179          mutex_unlock(&ctx->mapping_lock);
  184  spufs_mem_dump(struct spu_context *ctx, struct coredump_params *cprm)
  [all …]
|
/linux/drivers/ras/amd/atl/

map.c
   15  static int df2_get_intlv_mode(struct addr_ctx *ctx)
   17          ctx->map.intlv_mode = FIELD_GET(DF2_INTLV_NUM_CHAN, ctx->map.base);
   19          if (ctx->map.intlv_mode == 8)
   20                  ctx->map.intlv_mode = DF2_2CHAN_HASH;
   22          if (ctx->map.intlv_mode != NONE &&
   23              ctx->map.intlv_mode != NOHASH_2CHAN &&
   24              ctx->map.intlv_mode != DF2_2CHAN_HASH)
   30  static int df3_get_intlv_mode(struct addr_ctx *ctx)
   32          ctx->map.intlv_mode = FIELD_GET(DF3_INTLV_NUM_CHAN, ctx->map.base);
   36  static int df3p5_get_intlv_mode(struct addr_ctx *ctx)
  [all …]
|
dehash.c
   15  static int df2_dehash_addr(struct addr_ctx *ctx)
   19          intlv_bit_pos = ctx->map.intlv_bit_pos;
   20          intlv_bit = !!(BIT_ULL(intlv_bit_pos) & ctx->ret_addr);
   23          hashed_bit ^= FIELD_GET(BIT_ULL(12), ctx->ret_addr);
   24          hashed_bit ^= FIELD_GET(BIT_ULL(18), ctx->ret_addr);
   25          hashed_bit ^= FIELD_GET(BIT_ULL(21), ctx->ret_addr);
   26          hashed_bit ^= FIELD_GET(BIT_ULL(30), ctx->ret_addr);
   29          ctx->ret_addr ^= BIT_ULL(intlv_bit_pos);
   34  static int df3_dehash_addr(struct addr_ctx *ctx)
   39          hash_ctl_64k = FIELD_GET(DF3_HASH_CTL_64K, ctx->map.ctl);
  [all …]
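
df2_dehash_addr() undoes the DF2 channel hash by XOR-folding address bits 12, 18, 21 and 30 into the interleave bit and toggling that bit in ret_addr when the fold says the hash flipped it. A tiny user-space rendering of that folding step; the compare-then-toggle condition is inferred from the toggle on line 29, since the excerpt elides the surrounding lines:

    #include <stdbool.h>
    #include <stdint.h>

    /* XOR-fold the DF2 hash inputs shown above: bits 12, 18, 21 and 30. */
    static bool df2_hash_of(uint64_t addr, unsigned int intlv_bit_pos)
    {
            bool b = (addr >> intlv_bit_pos) & 1;

            b ^= (addr >> 12) & 1;
            b ^= (addr >> 18) & 1;
            b ^= (addr >> 21) & 1;
            b ^= (addr >> 30) & 1;
            return b;
    }

    static uint64_t df2_dehash(uint64_t addr, unsigned int intlv_bit_pos)
    {
            bool intlv_bit = (addr >> intlv_bit_pos) & 1;

            /* If the hash would have flipped the interleave bit, flip it back. */
            if (df2_hash_of(addr, intlv_bit_pos) != intlv_bit)
                    addr ^= 1ULL << intlv_bit_pos;
            return addr;
    }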
|
/linux/tools/testing/selftests/filesystems/epoll/

epoll_wakeup_test.c
   43  static void kill_timeout(struct epoll_mtcontext *ctx)
   46          pthread_kill(ctx->main, SIGUSR1);
   47          pthread_kill(ctx->waiter, SIGUSR1);
   53          struct epoll_mtcontext *ctx = data;  /* in waiter_entry1a() */
   55          if (epoll_wait(ctx->efd[0], &e, 1, -1) > 0)
   56                  __sync_fetch_and_add(&ctx->count, 1);
   65          struct epoll_mtcontext *ctx = data;  /* in waiter_entry1ap() */
   67          pfd.fd = ctx->efd[0];
   70          if (epoll_wait(ctx->efd[0], &e, 1, 0) > 0)
   71                  __sync_fetch_and_add(&ctx->count, 1);
  [all …]
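
The waiter_entry* helpers all follow the same shape: a worker thread parks in epoll_wait() on ctx->efd[0] and atomically bumps ctx->count when it wakes. A small runnable user-space sketch of that pattern, using an eventfd as the wake-up source (plain POSIX/Linux, independent of the kselftest harness; error checks trimmed for brevity):

    #include <pthread.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <sys/epoll.h>
    #include <sys/eventfd.h>
    #include <unistd.h>

    struct mtcontext {
            int efd;            /* epoll instance */
            int evfd;           /* eventfd used to wake the waiter */
            int count;
    };

    static void *waiter(void *data)
    {
            struct mtcontext *ctx = data;
            struct epoll_event e;

            if (epoll_wait(ctx->efd, &e, 1, -1) > 0)
                    __sync_fetch_and_add(&ctx->count, 1);
            return NULL;
    }

    int main(void)
    {
            struct mtcontext ctx = { .count = 0 };
            struct epoll_event e = { .events = EPOLLIN };
            pthread_t t;
            uint64_t one = 1;

            ctx.efd = epoll_create1(0);
            ctx.evfd = eventfd(0, 0);
            epoll_ctl(ctx.efd, EPOLL_CTL_ADD, ctx.evfd, &e);

            pthread_create(&t, NULL, waiter, &ctx);
            write(ctx.evfd, &one, sizeof(one));   /* wake the waiter */
            pthread_join(t, NULL);

            printf("count = %d\n", ctx.count);    /* expect 1 */
            return 0;
    }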
|
/linux/drivers/media/i2c/

max2175.c
   30  #define mxm_dbg(ctx, fmt, arg...) dev_dbg(&ctx->client->dev, fmt, ## arg)
   31  #define mxm_err(ctx, fmt, arg...) dev_err(&ctx->client->dev, fmt, ## arg)
  316  static int max2175_read(struct max2175 *ctx, u8 idx, u8 *val)
  321          ret = regmap_read(ctx->regmap, idx, &regval);
  323                  mxm_err(ctx, "read ret(%d): idx 0x%02x\n", ret, idx);
  330  static int max2175_write(struct max2175 *ctx, u8 idx, u8 val)
  334          ret = regmap_write(ctx->regmap, idx, val);
  336                  mxm_err(ctx, "write ret(%d): idx 0x%02x val 0x%02x\n",
  342  static u8 max2175_read_bits(struct max2175 *ctx, u8 idx, u8 msb, u8 lsb)
  346          if (max2175_read(ctx, idx, &val))
  [all …]
|
/linux/fs/smb/client/

fs_context.c
  217  cifs_parse_security_flavors(struct fs_context *fc, char *value, struct smb3_fs_context *ctx)
  226          ctx->sectype = Unspecified;
  227          ctx->sign = false;
  234                  ctx->sign = true;
  237                  ctx->sectype = Kerberos;
  240                  ctx->sign = true;
  243                  ctx->sectype = RawNTLMSSP;
  246                  ctx->sign = true;
  249                  ctx->sectype = NTLMv2;
  252                  ctx->nullauth = 1;
  [all …]
|
/linux/drivers/media/platform/mediatek/vcodec/decoder/

mtk_vcodec_dec_stateful.c
   55  static struct vb2_buffer *get_display_buffer(struct mtk_vcodec_dec_ctx *ctx)
   61          mtk_v4l2_vdec_dbg(3, ctx, "[%d]", ctx->id);
   62          if (vdec_if_get_param(ctx, GET_PARAM_DISP_FRAME_BUFFER,
   64                  mtk_v4l2_vdec_err(ctx, "[%d]Cannot get param : GET_PARAM_DISP_FRAME_BUFFER",
   65                                    ctx->id);
   70                  mtk_v4l2_vdec_dbg(3, ctx, "No display frame buffer");
   77          mutex_lock(&ctx->lock);
   79          mtk_v4l2_vdec_dbg(2, ctx, "[%d]status=%x queue id=%d to done_list %d",
   80                            ctx->id, disp_frame_buffer->status,
   84          ctx->decoded_frame_cnt++;
  [all …]
|
vdec_drv_if.c
   17  int vdec_if_init(struct mtk_vcodec_dec_ctx *ctx, unsigned int fourcc)
   19          enum mtk_vdec_hw_arch hw_arch = ctx->dev->vdec_pdata->hw_arch;
   24          if (!ctx->dev->vdec_pdata->is_subdev_supported) {
   25                  ctx->dec_if = &vdec_h264_slice_if;
   26                  ctx->hw_id = MTK_VDEC_CORE;
   28                  ctx->dec_if = &vdec_h264_slice_multi_if;
   29                  ctx->hw_id = IS_VDEC_LAT_ARCH(hw_arch) ? MTK_VDEC_LAT0 : MTK_VDEC_CORE;
   33                  ctx->dec_if = &vdec_h264_if;
   34                  ctx->hw_id = MTK_VDEC_CORE;
   37                  ctx->dec_if = &vdec_vp8_slice_if;
  [all …]
|
/linux/drivers/gpu/drm/bridge/analogix/

anx7625.c
   46  static int i2c_access_workaround(struct anx7625_data *ctx,
   53          if (client == ctx->last_client)
   56          ctx->last_client = client;
   58          if (client == ctx->i2c.tcpc_client)
   60          else if (client == ctx->i2c.tx_p0_client)
   62          else if (client == ctx->i2c.tx_p1_client)
   64          else if (client == ctx->i2c.rx_p0_client)
   66          else if (client == ctx->i2c.rx_p1_client)
   80  static int anx7625_reg_read(struct anx7625_data *ctx,
   86          i2c_access_workaround(ctx, client);
  [all …]
|
/linux/drivers/media/dvb-core/

dvb_vb2.c
   33          struct dvb_vb2_ctx *ctx = vb2_get_drv_priv(vq);  /* in _queue_setup() */
   35          ctx->buf_cnt = *nbuffers;
   37          sizes[0] = ctx->buf_siz;
   44          dprintk(3, "[%s] count=%d, size=%d\n", ctx->name,
   52          struct dvb_vb2_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);  /* in _buffer_prepare() */
   53          unsigned long size = ctx->buf_siz;
   57                  ctx->name, vb2_plane_size(vb, 0), size);
   62          dprintk(3, "[%s]\n", ctx->name);
   69          struct dvb_vb2_ctx *ctx = vb2_get_drv_priv(vb->vb2_queue);  /* in _buffer_queue() */
   73          spin_lock_irqsave(&ctx->slock, flags);
  [all …]
|
/linux/tools/perf/tests/

expr.c
   61  static int test(struct expr_parse_ctx *ctx, const char *e, double val2)
   65          if (expr__parse(&val, ctx, e))
   77          struct expr_parse_ctx *ctx;  /* in test__expr() */
   87          ctx = expr__ctx_new();
   88          TEST_ASSERT_VAL("expr__ctx_new", ctx);
   89          expr__add_id_val(ctx, strdup("FOO"), 1);
   90          expr__add_id_val(ctx, strdup("BAR"), 2);
   92          ret = test(ctx, "1+1", 2);
   93          ret |= test(ctx, "FOO+BAR", 3);
   94          ret |= test(ctx, "(BAR/2)%2", 1);
  [all …]
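
The excerpt shows the whole shape of the perf expr test: build a parse context, seed it with named values, then let expr__parse() evaluate strings against them. A condensed sketch using only the calls visible above plus expr__ctx_free(), which is assumed here to be the matching teardown; this is tools/perf-internal API, so it only builds inside the perf tree:

    #include <string.h>
    #include "util/expr.h"   /* header path as used inside tools/perf (assumed) */

    static int check_expr(void)
    {
            struct expr_parse_ctx *ctx = expr__ctx_new();
            double val;
            int ret;

            if (!ctx)
                    return -1;

            /* Named values the expressions can refer to. */
            expr__add_id_val(ctx, strdup("FOO"), 1);
            expr__add_id_val(ctx, strdup("BAR"), 2);

            ret = expr__parse(&val, ctx, "FOO+BAR");   /* 0 on success, val == 3 */

            expr__ctx_free(ctx);
            return ret;
    }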
|
/linux/crypto/asymmetric_keys/

x509_cert_parser.c
   64          struct x509_parse_context *ctx __free(kfree) = NULL;  /* in x509_cert_parse() */
   77          ctx = kzalloc(sizeof(struct x509_parse_context), GFP_KERNEL);
   78          if (!ctx)
   81          ctx->cert = cert;
   82          ctx->data = (unsigned long)data;
   85          ret = asn1_ber_decoder(&x509_decoder, ctx, data, datalen);
   90          if (ctx->raw_akid) {
   92                          ctx->raw_akid_size, ctx->raw_akid_size, ctx->raw_akid);
   93                  ret = asn1_ber_decoder(&x509_akid_decoder, ctx,
   94                                         ctx->raw_akid, ctx->raw_akid_size);
  [all …]
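
Line 64 uses scope-based cleanup: the __free(kfree) attribute makes the compiler run kfree() on ctx automatically when it goes out of scope, so the parser needs no explicit kfree on its error paths. A generic sketch of the idiom with illustrative names (parse_state, parse_blob):

    #include <linux/cleanup.h>
    #include <linux/errno.h>
    #include <linux/slab.h>

    struct parse_state {
            int nr_items;
            /* ... */
    };

    static int parse_blob(const void *data, size_t len)
    {
            /* Freed automatically on every return path once non-NULL. */
            struct parse_state *st __free(kfree) = kzalloc(sizeof(*st), GFP_KERNEL);

            if (!st)
                    return -ENOMEM;

            if (!len)
                    return -EINVAL;     /* st is kfree()d here implicitly */

            /* ... decode into st ... */
            return 0;                   /* and here as well */
    }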
|
/linux/arch/mips/net/

bpf_jit_comp32.c
  167  static void clobber_reg64(struct jit_context *ctx, const u8 reg[])
  169          clobber_reg(ctx, reg[0]);
  170          clobber_reg(ctx, reg[1]);
  174  static void emit_mov_se_i64(struct jit_context *ctx, const u8 dst[], s32 imm)
  176          emit_mov_i(ctx, lo(dst), imm);
  178                  emit(ctx, addiu, hi(dst), MIPS_R_ZERO, -1);
  180                  emit(ctx, move, hi(dst), MIPS_R_ZERO);
  181          clobber_reg64(ctx, dst);
  185  static void emit_zext_ver(struct jit_context *ctx, const u8 dst[])
  187          if (!ctx->program->aux->verifier_zext) {
  [all …]
|
bpf_jit_comp.c
  127  int push_regs(struct jit_context *ctx, u32 mask, u32 excl, int depth)
  135                          emit(ctx, sw, reg, depth, MIPS_R_SP);
  137                          emit(ctx, sd, reg, depth, MIPS_R_SP);
  142          ctx->stack_used = max((int)ctx->stack_used, depth);
  150  int pop_regs(struct jit_context *ctx, u32 mask, u32 excl, int depth)
  158                          emit(ctx, lw, reg, depth, MIPS_R_SP);
  160                          emit(ctx, ld, reg, depth, MIPS_R_SP);
  169  int get_target(struct jit_context *ctx, u32 loc)
  171          u32 index = INDEX(ctx->descriptors[loc]);
  172          unsigned long pc = (unsigned long)&ctx->target[ctx->jit_index];
  [all …]
|
/linux/arch/parisc/net/

bpf_jit_comp32.c
  120  static void emit_hppa_copy(const s8 rs, const s8 rd, struct hppa_jit_context *ctx)
  122          REG_SET_SEEN(ctx, rd);
  125          REG_SET_SEEN(ctx, rs);
  126          emit(hppa_copy(rs, rd), ctx);
  129  static void emit_hppa_xor(const s8 r1, const s8 r2, const s8 r3, struct hppa_jit_context *ctx)
  131          REG_SET_SEEN(ctx, r1);
  132          REG_SET_SEEN(ctx, r2);
  133          REG_SET_SEEN(ctx, r3);
  135                  emit(hppa_copy(HPPA_REG_ZERO, r3), ctx);
  137                  emit(hppa_xor(r1, r2, r3), ctx);
  [all …]
|
/linux/drivers/media/platform/samsung/s5p-mfc/

s5p_mfc_opr_v5.c
   34  static int s5p_mfc_alloc_dec_temp_buffers_v5(struct s5p_mfc_ctx *ctx)
   36          struct s5p_mfc_dev *dev = ctx->dev;
   40          ctx->dsc.size = buf_size->dsc;
   41          ret = s5p_mfc_alloc_priv_buf(dev, BANK_L_CTX, &ctx->dsc);
   47          BUG_ON(ctx->dsc.dma & ((1 << MFC_BANK1_ALIGN_ORDER) - 1));
   48          memset(ctx->dsc.virt, 0, ctx->dsc.size);
   55  static void s5p_mfc_release_dec_desc_buffer_v5(struct s5p_mfc_ctx *ctx)
   57          s5p_mfc_release_priv_buf(ctx->dev, &ctx->dsc);
   61  static int s5p_mfc_alloc_codec_buffers_v5(struct s5p_mfc_ctx *ctx)
   63          struct s5p_mfc_dev *dev = ctx->dev;
  [all …]
|
/linux/drivers/misc/ocxl/

context.c
   11          struct ocxl_context *ctx;  /* in ocxl_context_alloc() */
   13          ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
   14          if (!ctx)
   17          ctx->afu = afu;
   19          pasid = idr_alloc(&afu->contexts_idr, ctx, afu->pasid_base,
   23                  kfree(ctx);
   29          ctx->pasid = pasid;
   30          ctx->status = OPENED;
   31          mutex_init(&ctx->status_mutex);
   32          ctx->mapping = mapping;
  [all …]
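
ocxl_context_alloc() follows the usual allocate-then-register shape: kzalloc the context, reserve an ID with idr_alloc(), and undo the allocation if the IDR insert fails. A generic sketch of that pattern with illustrative names (my_context, my_idr, my_context_alloc):

    #include <linux/err.h>
    #include <linux/idr.h>
    #include <linux/slab.h>

    struct my_context {
            int id;
            /* ... */
    };

    static DEFINE_IDR(my_idr);

    static struct my_context *my_context_alloc(int id_base, int id_end)
    {
            struct my_context *ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
            int id;

            if (!ctx)
                    return ERR_PTR(-ENOMEM);

            id = idr_alloc(&my_idr, ctx, id_base, id_end, GFP_KERNEL);
            if (id < 0) {
                    kfree(ctx);             /* roll back the allocation */
                    return ERR_PTR(id);
            }

            ctx->id = id;
            return ctx;
    }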
|