#ifndef _RUNTIME_INSTR_H
#define _RUNTIME_INSTR_H

/*
 * s390 runtime-instrumentation (RI) support.
 *
 * Command codes for the runtime-instrumentation system call
 * (start/stop sampling for the calling task).
 */
#define S390_RUNTIME_INSTR_START	0x1
#define S390_RUNTIME_INSTR_STOP		0x2

/*
 * Runtime-instrumentation control block.
 *
 * The field widths and ordering mirror a hardware-architected layout
 * (loaded/stored verbatim by the LRIC/STRIC instructions below), so the
 * structure must remain packed and 8-byte aligned and no field may be
 * reordered or resized.
 *
 * NOTE(review): individual bit semantics are defined by the RI facility
 * in the z/Architecture Principles of Operation; the names below
 * (presumably: rca = current address, roa = origin address,
 * rla = limit address of the instrumentation buffer) should be
 * verified against that document rather than this header.
 */
struct runtime_instr_cb {
	__u64 rca;			/* presumably RI current buffer address */
	__u64 roa;			/* presumably RI buffer origin address */
	__u64 rla;			/* presumably RI buffer limit address */

	/* First 32-bit flag word — single-bit controls plus a 4-bit key. */
	__u32 v			: 1;
	__u32 s			: 1;
	__u32 k			: 1;
	__u32 h			: 1;
	__u32 a			: 1;
	__u32 reserved1		: 3;
	__u32 ps		: 1;
	__u32 qs		: 1;
	__u32 pc		: 1;
	__u32 qc		: 1;
	__u32 reserved2		: 1;
	__u32 g			: 1;
	__u32 u			: 1;
	__u32 l			: 1;
	__u32 key		: 4;
	__u32 reserved3		: 8;
	__u32 t			: 1;
	__u32 rgs		: 3;

	/* Second 32-bit flag word. */
	__u32 m			: 4;
	__u32 n			: 1;
	__u32 mae		: 1;
	__u32 reserved4		: 2;
	__u32 c			: 1;
	__u32 r			: 1;
	__u32 b			: 1;
	__u32 j			: 1;
	__u32 e			: 1;
	__u32 x			: 1;
	__u32 reserved5		: 2;
	__u32 bpxn		: 1;
	__u32 bpxt		: 1;
	__u32 bpti		: 1;
	__u32 bpni		: 1;
	__u32 reserved6		: 2;

	/* Third flag word: two single-bit flags and two 4-bit counters. */
	__u32 d			: 1;
	__u32 f			: 1;
	__u32 ic		: 4;
	__u32 dc		: 4;

	__u64 reserved7;
	__u64 sf;			/* NOTE(review): meaning not derivable from this header */
	__u64 rsic;			/* NOTE(review): meaning not derivable from this header */
	__u64 reserved8;
} __packed __aligned(8);

/* All-zero control block used to disable RI for a task (defined in a .c file). */
extern struct runtime_instr_cb runtime_instr_empty_cb;

/*
 * Load @cb into the runtime-instrumentation facility.
 *
 * Emits the LRIC instruction by raw opcode (0xeb...60) since the
 * assembler mnemonic may not be available; the "Q" constraint makes the
 * control block a direct memory operand of the instruction.
 */
static inline void load_runtime_instr_cb(struct runtime_instr_cb *cb)
{
	asm volatile(".insn rsy,0xeb0000000060,0,0,%0" /* LRIC */
		: : "Q" (*cb));
}

/*
 * Store the facility's current runtime-instrumentation state into @cb.
 *
 * Emits the STRIC instruction by raw opcode (0xeb...61); the whole
 * control block is written ("=Q" output operand) and the condition code
 * is clobbered.
 */
static inline void store_runtime_instr_cb(struct runtime_instr_cb *cb)
{
	asm volatile(".insn rsy,0xeb0000000061,0,0,%0" /* STRIC */
		: "=Q" (*cb) : : "cc");
}

/*
 * Context-switch helper: save the outgoing task's RI state into
 * @cb_prev, if the task has one. Called with the per-task control-block
 * pointer, which is NULL for tasks not using RI.
 */
static inline void save_ri_cb(struct runtime_instr_cb *cb_prev)
{
	if (cb_prev)
		store_runtime_instr_cb(cb_prev);
}

/*
 * Context-switch helper: install the incoming task's RI state.
 *
 * If the next task uses RI (@cb_next != NULL), load its control block.
 * Otherwise, if the previous task used RI, load the empty control block
 * so the outgoing task's instrumentation does not stay active for the
 * new task. If neither task uses RI, nothing needs to be done.
 */
static inline void restore_ri_cb(struct runtime_instr_cb *cb_next,
				 struct runtime_instr_cb *cb_prev)
{
	if (cb_next)
		load_runtime_instr_cb(cb_next);
	else if (cb_prev)
		load_runtime_instr_cb(&runtime_instr_empty_cb);
}

struct task_struct;

/* Release a task's RI resources on exit (defined in a .c file). */
void runtime_instr_release(struct task_struct *tsk);

#endif /* _RUNTIME_INSTR_H */