Lines matching "0xd00"

All of the hits below fall in the PowerPC SLB multihit fault-injection helpers
(insert_slb_entry(), inject_vmalloc_slb_multihit(), inject_kmalloc_slb_multihit()
and insert_dup_slb_entry_0()); they are grouped by the function that contains them.
In insert_slb_entry() (both hits are the opening line of a multi-line inline
asm statement):

        asm volatile("slbmte %0,%1" :
        asm volatile("slbmte %0,%1" :
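The two hits above are only the first line of their asm statements. For context,
here is a minimal sketch of how such a helper can install two SLB entries that
both translate the same effective address; the mk_vsid_data()/mk_esid_data()
helpers, SLB_VSID_KERNEL, SLB_NUM_BOLTED and mmu_psize_defs[] are assumed from
the surrounding powerpc book3s64 MMU code, not shown in the hits themselves.

        /*
         * Sketch (not a verbatim copy): install two SLB entries that both
         * translate the segment containing 'p', which is what makes the
         * later access fault with a multihit.
         */
        static void insert_slb_entry(unsigned long p, int ssize, int page_size)
        {
                unsigned long flags = SLB_VSID_KERNEL | mmu_psize_defs[page_size].sllp;

                preempt_disable();

                /* First entry, in the first slot after the bolted entries. */
                asm volatile("slbmte %0,%1" :
                             : "r" (mk_vsid_data(p, ssize, flags)),
                               "r" (mk_esid_data(p, ssize, SLB_NUM_BOLTED))
                             : "memory");

                /* Second entry for the same effective address: the multihit. */
                asm volatile("slbmte %0,%1" :
                             : "r" (mk_vsid_data(p, ssize, flags)),
                               "r" (mk_esid_data(p, ssize, SLB_NUM_BOLTED + 1))
                             : "memory");

                preempt_enable();
        }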
In inject_vmalloc_slb_multihit():

        /* Inject SLB multihit on a vmalloc-ed address, i.e. 0xD00... */
        ...
        p[0] = '!';
        ...
        return 0;
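A sketch of the vmalloc-side injector these hits come from: allocate a page in
the vmalloc region (the 0xD00... range on hash-MMU ppc64), install duplicate
SLB entries for it, then touch it. PAGE_SIZE, MMU_SEGSIZE_1T and
mmu_vmalloc_psize are assumed from the powerpc MMU headers, and the
three-argument insert_slb_entry() signature matches the sketch above rather
than anything visible in the hits.

        static int inject_vmalloc_slb_multihit(void)
        {
                char *p;

                p = vmalloc(PAGE_SIZE);
                if (!p)
                        return -ENOMEM;

                insert_slb_entry((unsigned long)p, MMU_SEGSIZE_1T, mmu_vmalloc_psize);

                /*
                 * This store takes the SLB multihit; if the resulting machine
                 * check is handled correctly the kernel recovers and execution
                 * continues here.
                 */
                p[0] = '!';

                vfree(p);
                return 0;
        }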
In inject_kmalloc_slb_multihit():

        /* Inject SLB multihit on a kmalloc-ed address, i.e. 0xC00... */
        ...
        p[0] = '!';
        ...
        return 0;
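The kmalloc-side injector is symmetric, except that the buffer sits in the
linear mapping (0xC00... addresses), so the linear-map page size is used. Again
a sketch under assumptions: GFP_KERNEL, MMU_SEGSIZE_1T and mmu_linear_psize are
taken from the powerpc headers, and the 2048-byte allocation size is
illustrative.

        static int inject_kmalloc_slb_multihit(void)
        {
                char *p;

                p = kmalloc(2048, GFP_KERNEL);
                if (!p)
                        return -ENOMEM;

                insert_slb_entry((unsigned long)p, MMU_SEGSIZE_1T, mmu_linear_psize);

                /* As above, this store is what actually triggers the multihit. */
                p[0] = '!';

                kfree(p);
                return 0;
        }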
In insert_dup_slb_entry_0() (the first hit is the tail of the function's header
comment; the slbmte hits are again the opening line of multi-line asm
statements):

         * multihit in bolted entry 0.

        unsigned long i = 0;
        ...
        asm volatile("slbmfee %0,%1" : "=r" (esid) : "r" (i));
        asm volatile("slbmfev %0,%1" : "=r" (vsid) : "r" (i));

        /* for i != 0 we would need to mask out the old entry number */
        asm volatile("slbmte %0,%1" :
        ...
        asm volatile("slbmfee %0,%1" : "=r" (esid) : "r" (i));
        asm volatile("slbmfev %0,%1" : "=r" (vsid) : "r" (i));

        /* for i != 0 we would need to mask out the old entry number */
        asm volatile("slbmte %0,%1" :
        ...
        pr_info("%s accessing test address 0x%lx: 0x%lx\n",
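Putting the bolted-entry hits together, a sketch of how the duplicate of bolted
entry 0 can be created: read entry 0 back with slbmfee/slbmfev, re-insert it
with slbmte into the slots after the bolted area, and then load from the test
address so the pr_info() above reports the value read through the duplicated
translation. SLB_NUM_BOLTED and PAGE_OFFSET are assumed from the powerpc
headers; the operand wiring of the truncated slbmte statements is filled in as
an assumption consistent with the reads above.

        static void insert_dup_slb_entry_0(void)
        {
                unsigned long test_address = PAGE_OFFSET, *test_ptr;
                unsigned long esid, vsid;
                unsigned long i = 0;

                test_ptr = (unsigned long *)test_address;
                preempt_disable();

                /* Read the ESID/VSID of bolted SLB entry 0. */
                asm volatile("slbmfee %0,%1" : "=r" (esid) : "r" (i));
                asm volatile("slbmfev %0,%1" : "=r" (vsid) : "r" (i));

                /* for i != 0 we would need to mask out the old entry number */
                asm volatile("slbmte %0,%1" :
                             : "r" (vsid),
                               "r" (esid | SLB_NUM_BOLTED)
                             : "memory");

                /* Read entry 0 again and duplicate it into the next slot. */
                asm volatile("slbmfee %0,%1" : "=r" (esid) : "r" (i));
                asm volatile("slbmfev %0,%1" : "=r" (vsid) : "r" (i));

                asm volatile("slbmte %0,%1" :
                             : "r" (vsid),
                               "r" (esid | SLB_NUM_BOLTED + 1)
                             : "memory");

                /* This load goes through the duplicated entries and raises the multihit. */
                pr_info("%s accessing test address 0x%lx: 0x%lx\n",
                        __func__, test_address, *test_ptr);

                preempt_enable();
        }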