Lines Matching full:vcpu

63  * Per-vCPU cache. Since most accesses from a vCPU will be to
141 typedef int (mem_cb_t)(struct vcpu *vcpu, uint64_t gpa, struct mem_range *mr,
145 mem_read(struct vcpu *vcpu, uint64_t gpa, uint64_t *rval, int size, void *arg) in mem_read() argument
150 error = (*mr->handler)(vcpu, MEM_F_READ, gpa, size, rval, mr->arg1, in mem_read()
156 mem_write(struct vcpu *vcpu, uint64_t gpa, uint64_t wval, int size, void *arg) in mem_write() argument
161 error = (*mr->handler)(vcpu, MEM_F_WRITE, gpa, size, &wval, mr->arg1, in mem_write()
167 access_memory(struct vcpu *vcpu, uint64_t paddr, mem_cb_t *cb, void *arg) in access_memory() argument
172 vcpuid = vcpu_id(vcpu); in access_memory()
175 * First check the per-vCPU cache in access_memory()
186 /* Update the per-vCPU cache */ in access_memory()
214 err = cb(vcpu, paddr, &entry->mr_param, arg); in access_memory()
230 emulate_mem_cb(struct vcpu *vcpu, uint64_t paddr, struct mem_range *mr, in emulate_mem_cb() argument
236 return (vmm_emulate_instruction(vcpu, paddr, ema->vie, ema->paging, in emulate_mem_cb()
241 emulate_mem(struct vcpu *vcpu, uint64_t paddr, struct vie *vie, in emulate_mem() argument
248 return (access_memory(vcpu, paddr, emulate_mem_cb, &ema)); in emulate_mem()
258 rw_mem_cb(struct vcpu *vcpu, uint64_t paddr, struct mem_range *mr, void *arg) in rw_mem_cb() argument
263 return (mr->handler(vcpu, rma->operation, paddr, rma->size, in rw_mem_cb()
268 read_mem(struct vcpu *vcpu, uint64_t gpa, uint64_t *rval, int size) in read_mem() argument
275 return (access_memory(vcpu, gpa, rw_mem_cb, &rma)); in read_mem()
279 write_mem(struct vcpu *vcpu, uint64_t gpa, uint64_t wval, int size) in write_mem() argument
286 return (access_memory(vcpu, gpa, rw_mem_cb, &rma)); in write_mem()
348 /* flush Per-vCPU cache */ in unregister_mem()
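
The hits above all fall on one dispatch path: a guest-physical access is resolved to a registered struct mem_range and then handed to a mem_cb_t callback through access_memory(), which consults a per-vCPU cache before doing a full range lookup (and which unregister_mem() therefore has to flush). Below is a minimal sketch of that pattern, not the real source: the mem_range fields, the lookup helper (lookup_mem_range), the cache array (mmio_hint, MAX_VCPUS), the rw_mem_args layout and the MEM_F_* values are assumptions filled in around the names visible in the listing.

	/*
	 * Sketch only: callback dispatch with a per-vCPU "most recently
	 * used range" cache.  Everything not named in the hits above is
	 * an illustrative assumption.
	 */
	#include <errno.h>
	#include <stddef.h>
	#include <stdint.h>

	#define	MEM_F_READ	0x1		/* assumed values */
	#define	MEM_F_WRITE	0x2
	#define	MAX_VCPUS	64		/* assumed cache sizing */

	struct vcpu;				/* opaque, as in the listing */

	int	vcpu_id(struct vcpu *vcpu);	/* provided by the hypervisor library */

	struct mem_range {			/* assumed layout */
		uint64_t	base;
		uint64_t	size;
		int		(*handler)(struct vcpu *vcpu, int dir, uint64_t gpa,
				    int size, uint64_t *val, void *arg1, long arg2);
		void		*arg1;
		long		arg2;
	};

	/* Callback type matching the typedef at the top of the listing. */
	typedef int (mem_cb_t)(struct vcpu *vcpu, uint64_t gpa, struct mem_range *mr,
	    void *arg);

	/* Hypothetical stand-in for the real registered-range lookup. */
	struct mem_range *lookup_mem_range(uint64_t gpa);

	/* Per-vCPU cache: the most recently used range for each vCPU. */
	static struct mem_range *mmio_hint[MAX_VCPUS];

	static int
	access_memory(struct vcpu *vcpu, uint64_t paddr, mem_cb_t *cb, void *arg)
	{
		struct mem_range *mr;
		int vcpuid;

		vcpuid = vcpu_id(vcpu);

		/* First check the per-vCPU cache. */
		mr = mmio_hint[vcpuid];
		if (mr == NULL || paddr < mr->base || paddr >= mr->base + mr->size) {
			mr = lookup_mem_range(paddr);
			if (mr == NULL)
				return (ESRCH);
			/* Update the per-vCPU cache. */
			mmio_hint[vcpuid] = mr;
		}

		return (cb(vcpu, paddr, mr, arg));
	}

	/* Arguments threaded through access_memory() for plain reads/writes. */
	struct rw_mem_args {
		uint64_t	*val;
		int		size;
		int		operation;	/* MEM_F_READ or MEM_F_WRITE */
	};

	static int
	rw_mem_cb(struct vcpu *vcpu, uint64_t paddr, struct mem_range *mr, void *arg)
	{
		struct rw_mem_args *rma = arg;

		return (mr->handler(vcpu, rma->operation, paddr, rma->size,
		    rma->val, mr->arg1, mr->arg2));
	}

	int
	read_mem(struct vcpu *vcpu, uint64_t gpa, uint64_t *rval, int size)
	{
		struct rw_mem_args rma = { rval, size, MEM_F_READ };

		return (access_memory(vcpu, gpa, rw_mem_cb, &rma));
	}

	int
	write_mem(struct vcpu *vcpu, uint64_t gpa, uint64_t wval, int size)
	{
		struct rw_mem_args rma = { &wval, size, MEM_F_WRITE };

		return (access_memory(vcpu, gpa, rw_mem_cb, &rma));
	}

The point of the per-vCPU slot is the observation quoted in the line-63 comment: consecutive accesses from a vCPU tend to land in the same range, so the cached hint usually avoids a full search. The flip side, visible in the unregister_mem() hit, is that the cache has to be flushed whenever a range is unregistered so no vCPU keeps a stale pointer.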
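
The emulate_mem()/emulate_mem_cb() hits show the second user of the same dispatch: when an access must be emulated from a decoded instruction, the callback hands the resolved range to the instruction emulator together with mem_read()/mem_write() helpers that forward to the range's handler. The sketch below extends the one above (compile them together); the vmm_emulate_instruction() prototype, the mem_region_*_t typedefs and the emulate_mem_args layout are assumptions, not the library's documented API.

	/* Sketch of the instruction-emulation path, continuing the previous sketch. */
	struct vie;			/* decoded instruction, opaque here */
	struct vm_guest_paging;		/* guest paging state, opaque here */

	typedef int (mem_region_read_t)(struct vcpu *vcpu, uint64_t gpa,
	    uint64_t *rval, int size, void *arg);
	typedef int (mem_region_write_t)(struct vcpu *vcpu, uint64_t gpa,
	    uint64_t wval, int size, void *arg);

	/* Assumed emulator entry point; the real prototype may differ. */
	int	vmm_emulate_instruction(struct vcpu *vcpu, uint64_t gpa,
		    struct vie *vie, struct vm_guest_paging *paging,
		    mem_region_read_t *memread, mem_region_write_t *memwrite,
		    void *memarg);

	/* Helpers handed to the emulator: forward to the resolved range. */
	static int
	mem_read(struct vcpu *vcpu, uint64_t gpa, uint64_t *rval, int size, void *arg)
	{
		struct mem_range *mr = arg;

		return ((*mr->handler)(vcpu, MEM_F_READ, gpa, size, rval, mr->arg1,
		    mr->arg2));
	}

	static int
	mem_write(struct vcpu *vcpu, uint64_t gpa, uint64_t wval, int size, void *arg)
	{
		struct mem_range *mr = arg;

		return ((*mr->handler)(vcpu, MEM_F_WRITE, gpa, size, &wval, mr->arg1,
		    mr->arg2));
	}

	/* Arguments threaded through access_memory() for emulation. */
	struct emulate_mem_args {
		struct vie		*vie;
		struct vm_guest_paging	*paging;
	};

	static int
	emulate_mem_cb(struct vcpu *vcpu, uint64_t paddr, struct mem_range *mr,
	    void *arg)
	{
		struct emulate_mem_args *ema = arg;

		return (vmm_emulate_instruction(vcpu, paddr, ema->vie, ema->paging,
		    mem_read, mem_write, mr));
	}

	int
	emulate_mem(struct vcpu *vcpu, uint64_t paddr, struct vie *vie,
	    struct vm_guest_paging *paging)
	{
		struct emulate_mem_args ema = { vie, paging };

		return (access_memory(vcpu, paddr, emulate_mem_cb, &ema));
	}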