xref: /linux/arch/alpha/include/asm/processor.h (revision 93df8a1ed6231727c5db94a80b1a6bd5ee67cec3)
1 /*
2  * include/asm-alpha/processor.h
3  *
4  * Copyright (C) 1994 Linus Torvalds
5  */
6 
7 #ifndef __ASM_ALPHA_PROCESSOR_H
8 #define __ASM_ALPHA_PROCESSOR_H
9 
10 #include <linux/personality.h>	/* for ADDR_LIMIT_32BIT */
11 
/*
 * Returns current instruction pointer ("program counter").
 *
 * Alpha "br Ra,disp" writes the address of the *next* instruction into
 * Ra before branching; branching to ".+4" (that very next instruction)
 * therefore makes this a pure PC read with no actual control transfer.
 */
#define current_text_addr() \
  ({ void *__pc; __asm__ ("br %0,.+4" : "=r"(__pc)); __pc; })
17 
/*
 * We have a 42-bit user address space: 4TB user VM...
 */
#define TASK_SIZE (0x40000000000UL)

/*
 * Top of the user stack: 2GB (0x80000000) for tasks running with a
 * 32-bit personality (ADDR_LIMIT_32BIT set), otherwise 0x120000000UL.
 */
#define STACK_TOP \
  (current->personality & ADDR_LIMIT_32BIT ? 0x80000000 : 0x00120000000UL)

/* Largest STACK_TOP any personality can have (the 64-bit value above). */
#define STACK_TOP_MAX	0x00120000000UL
27 
/* This decides where the kernel will search for a free chunk of vm
 * space during mmap's: 1GB (0x40000000) for 32-bit personalities,
 * otherwise the midpoint of the 4TB address space (TASK_SIZE / 2).
 */
#define TASK_UNMAPPED_BASE \
  ((current->personality & ADDR_LIMIT_32BIT) ? 0x40000000 : TASK_SIZE / 2)
33 
/* Address-space segment limit holder.
 * NOTE(review): presumably consumed by the get_fs()/set_fs() machinery
 * in <asm/uaccess.h> -- confirm there; not referenced in this header. */
typedef struct {
	unsigned long seg;
} mm_segment_t;
37 
/* This is dead.  Everything has been moved to thread_info.  The empty
 * struct and initializer are kept only so generic code that embeds a
 * thread_struct in task_struct still compiles. */
struct thread_struct { };
#define INIT_THREAD  { }
41 
/* Return saved PC of a blocked thread.  */
struct task_struct;
extern unsigned long thread_saved_pc(struct task_struct *);

/* Do necessary setup to start up a newly executed thread.
 * Args (per the out-of-line definition elsewhere): regs, new PC, new SP. */
struct pt_regs;
extern void start_thread(struct pt_regs *, unsigned long, unsigned long);

/* Free all resources held by a thread. */
extern void release_thread(struct task_struct *);

/* NOTE(review): presumably returns the address a sleeping task is
 * blocked at (the /proc "wchan" value) -- defined out of line; confirm
 * in the arch process.c. */
unsigned long get_wchan(struct task_struct *p);
54 
/* User-mode PC of a task, taken from its saved pt_regs. */
#define KSTK_EIP(tsk) (task_pt_regs(tsk)->pc)

/* User stack pointer: read the live USP register when asking about the
 * current task, otherwise use the value saved in the task's PCB. */
#define KSTK_ESP(tsk) \
  ((tsk) == current ? rdusp() : task_thread_info(tsk)->pcb.usp)

/* Busy-wait relax hints: expand to a pure compiler barrier -- no
 * architectural delay/pause instruction is issued here. */
#define cpu_relax()	barrier()
#define cpu_relax_lowlatency() cpu_relax()
62 
/* Advertise to generic code that this architecture supplies its own
 * prefetch(), prefetchw() and spin_lock_prefetch() (defined below). */
#define ARCH_HAS_PREFETCH
#define ARCH_HAS_PREFETCHW
#define ARCH_HAS_SPINLOCK_PREFETCH

#ifndef CONFIG_SMP
/* Nothing to prefetch. */
#define spin_lock_prefetch(lock)  	do { } while (0)
#endif
71 
/*
 * Hint that *ptr will soon be read.  The builtin's defaults are exactly
 * what we want here: rw = 0 (read) and locality = 3 (keep in all cache
 * levels), so no explicit arguments are needed.
 */
extern inline void prefetch(const void *ptr)
{
	__builtin_prefetch(ptr);
}
76 
/*
 * Hint that *ptr will soon be written.  rw = 1 requests a write-intent
 * prefetch; the locality argument is omitted since the builtin's
 * default (3, maximum temporal locality) matches the original.
 */
extern inline void prefetchw(const void *ptr)
{
	__builtin_prefetch(ptr, 1);
}
81 
82 #ifdef CONFIG_SMP
/*
 * Prefetch a lock we are about to spin on.  A write-intent prefetch
 * (rw = 1) is used since acquiring the lock will store to it; locality
 * defaults to 3, identical to spelling it out explicitly.
 */
extern inline void spin_lock_prefetch(const void *ptr)
{
	__builtin_prefetch(ptr, 1);
}
87 #endif
88 
89 #endif /* __ASM_ALPHA_PROCESSOR_H */
90