#ifndef __X86_64_ASM_DEFNS_H__
#define __X86_64_ASM_DEFNS_H__

#include <asm/percpu.h>

#ifndef NDEBUG
/* Indicate special exception stack frame by inverting the frame pointer. */
#define SETUP_EXCEPTION_FRAME_POINTER           \
        movq %rsp,%rbp;                         \
        notq %rbp
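/*
 * Assert the state of EFLAGS.IF: push EFLAGS, test the saved IF bit, and
 * execute ud2a (invalid opcode) if condition 'x' does not hold.
 */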
#define ASSERT_INTERRUPT_STATUS(x)              \
        pushf;                                  \
        testb $X86_EFLAGS_IF>>8,1(%rsp);        \
        j##x 1f;                                \
        ud2a;                                   \
1:      addq $8,%rsp;
#else
#define SETUP_EXCEPTION_FRAME_POINTER
#define ASSERT_INTERRUPT_STATUS(x)
#endif

#define ASSERT_INTERRUPTS_ENABLED  ASSERT_INTERRUPT_STATUS(nz)
#define ASSERT_INTERRUPTS_DISABLED ASSERT_INTERRUPT_STATUS(z)

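/*
 * Push all general-purpose registers to complete the exception/interrupt
 * frame.  The push order defines the register-frame layout handed to the
 * C handlers; RESTORE_ALL must pop in exactly the reverse order.  cld
 * clears the direction flag, as the called C code expects.
 */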
#define SAVE_ALL                                \
        cld;                                    \
        pushq %rdi;                             \
        pushq %rsi;                             \
        pushq %rdx;                             \
        pushq %rcx;                             \
        pushq %rax;                             \
        pushq %r8;                              \
        pushq %r9;                              \
        pushq %r10;                             \
        pushq %r11;                             \
        pushq %rbx;                             \
        pushq %rbp;                             \
        SETUP_EXCEPTION_FRAME_POINTER;          \
        pushq %r12;                             \
        pushq %r13;                             \
        pushq %r14;                             \
        pushq %r15;

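/* Pop the general-purpose registers in the reverse of SAVE_ALL's order. */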
#define RESTORE_ALL                             \
        popq %r15;                              \
        popq %r14;                              \
        popq %r13;                              \
        popq %r12;                              \
        popq %rbp;                              \
        popq %rbx;                              \
        popq %r11;                              \
        popq %r10;                              \
        popq %r9;                               \
        popq %r8;                               \
        popq %rax;                              \
        popq %rcx;                              \
        popq %rdx;                              \
        popq %rsi;                              \
        popq %rdi;

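/*
 * Increment the per-CPU performance counter _name[_idx] for the processor
 * that vcpu _cur is running on.  _cur and %rdx are preserved by saving
 * them on the stack; the arithmetic flags are clobbered.
 */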
#ifdef PERF_COUNTERS
#define PERFC_INCR(_name,_idx,_cur)             \
        pushq _cur;                             \
        movslq VCPU_processor(_cur),_cur;       \
        pushq %rdx;                             \
        leaq per_cpu__perfcounters(%rip),%rdx;  \
        shlq $PERCPU_SHIFT,_cur;                \
        addq %rdx,_cur;                         \
        popq %rdx;                              \
        incl _name*4(_cur,_idx,4);              \
        popq _cur
#else
#define PERFC_INCR(_name,_idx,_cur)
#endif

/* Work around AMD erratum #88 */
#define safe_swapgs                             \
        "mfence; swapgs;"

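/*
 * Prefix string forcing a 64-bit operand size (REX.W) on the following
 * instruction in inline assembly.  The Solaris assembler separates the
 * prefix from the mnemonic with '\' rather than '/'.
 */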
#ifdef __sun__
#define REX64_PREFIX "rex64\\"
#else
#define REX64_PREFIX "rex64/"
#endif

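/*
 * Emit an interrupt entry point 'x' for vector 'v': push a zero error-code
 * slot, record the vector at offset 4 within it, save all registers, call
 * smp_<x>() with the register frame in %rdi, and exit via ret_from_intr.
 * Illustrative use (names not taken from this file):
 *
 *   BUILD_SMP_INTERRUPT(event_check_interrupt, EVENT_CHECK_VECTOR)
 */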
#define BUILD_SMP_INTERRUPT(x,v) XBUILD_SMP_INTERRUPT(x,v)
#define XBUILD_SMP_INTERRUPT(x,v)               \
asmlinkage void x(void);                        \
__asm__(                                        \
    "\n"__ALIGN_STR"\n"                         \
    ".globl " STR(x) "\n\t"                     \
    STR(x) ":\n\t"                              \
    "pushq $0\n\t"                              \
    "movl $"#v",4(%rsp)\n\t"                    \
    STR(SAVE_ALL)                               \
    "movq %rsp,%rdi\n\t"                        \
    "callq "STR(smp_##x)"\n\t"                  \
    "jmp ret_from_intr\n");

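/*
 * Emit the shared IRQ entry tail: common_interrupt saves all registers,
 * calls do_IRQ() with a pointer to the register frame, and exits via
 * ret_from_intr.  Per-vector stubs built with BUILD_IRQ() jump here.
 */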
#define BUILD_COMMON_IRQ()                      \
__asm__(                                        \
    "\n" __ALIGN_STR"\n"                        \
    "common_interrupt:\n\t"                     \
    STR(SAVE_ALL)                               \
    "movq %rsp,%rdi\n\t"                        \
    "callq " STR(do_IRQ) "\n\t"                 \
    "jmp ret_from_intr\n");

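/*
 * Paste an IRQ number into the canonical stub name, e.g.
 * IRQ_NAME(0x20) expands to IRQ0x20_interrupt(void).
 */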
#define IRQ_NAME2(nr) nr##_interrupt(void)
#define IRQ_NAME(nr) IRQ_NAME2(IRQ##nr)

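/*
 * Emit the per-vector stub IRQ<nr>_interrupt: push a zero error-code slot,
 * record the vector number at offset 4 within it, and tail-jump to
 * common_interrupt (emitted by BUILD_COMMON_IRQ()).  For example,
 * BUILD_IRQ(0x20) declares and emits IRQ0x20_interrupt.
 */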
#define BUILD_IRQ(nr)                           \
asmlinkage void IRQ_NAME(nr);                   \
__asm__(                                        \
    "\n"__ALIGN_STR"\n"                         \
    STR(IRQ) #nr "_interrupt:\n\t"              \
    "pushq $0\n\t"                              \
    "movl $"#nr",4(%rsp)\n\t"                   \
    "jmp common_interrupt");

#endif /* __X86_64_ASM_DEFNS_H__ */