1 #ifndef CPU_X86_LAPIC_H
2 #define CPU_X86_LAPIC_H
4 #include <cpu/x86/lapic_def.h>
5 #include <cpu/x86/msr.h>
8 /* Local APIC setup is only required when SMP or the I/O APIC is configured. */
9 #if CONFIG_SMP || CONFIG_IOAPIC
/* Read a 32-bit local APIC register at byte offset `reg` from the
 * memory-mapped window at LAPIC_DEFAULT_BASE.  The volatile cast keeps
 * the compiler from caching or eliding the MMIO load. */
15 static inline __attribute__((always_inline)) unsigned long lapic_read(unsigned long reg)
17 	return *((volatile unsigned long *)(LAPIC_DEFAULT_BASE+reg));
/* Write value `v` to the local APIC register at byte offset `reg`.
 * Plain volatile MMIO store; see lapic_write_atomic() for the locked
 * variant used when FORCE_READ_AROUND_WRITE is set. */
20 static inline __attribute__((always_inline)) void lapic_write(unsigned long reg, unsigned long v)
22 	*((volatile unsigned long *)(LAPIC_DEFAULT_BASE+reg)) = v;
/* Busy-wait until the ICR delivery-status bit (LAPIC_ICR_BUSY) clears,
 * i.e. the previously issued IPI has been accepted by the APIC.
 * Note: no timeout — spins forever if the bit never clears. */
25 static inline __attribute__((always_inline)) void lapic_wait_icr_idle(void)
27 	do { } while ( lapic_read( LAPIC_ICR ) & LAPIC_ICR_BUSY );
/* Enable this CPU's local APIC: read-modify-write the IA32_APIC_BASE
 * MSR, setting the APIC global-enable flag (bit 11) and the default
 * APIC base address in the low word. */
32 static inline void enable_lapic(void)
36 	msr = rdmsr(LAPIC_BASE_MSR);
	/* Bit 11 = APIC global enable; the base-address field shares msr.lo. */
39 	msr.lo |= LAPIC_DEFAULT_BASE | (1 << 11);
40 	wrmsr(LAPIC_BASE_MSR, msr);
/* Disable this CPU's local APIC via a read-modify-write of the
 * IA32_APIC_BASE MSR (the clearing of the enable bit happens between
 * the rdmsr and wrmsr below). */
43 static inline void disable_lapic(void)
46 	msr = rdmsr(LAPIC_BASE_MSR);
48 	wrmsr(LAPIC_BASE_MSR, msr);
/* Return this CPU's local APIC ID, held in bits 31:24 of the
 * LAPIC_ID register. */
51 static inline __attribute__((always_inline)) unsigned long lapicid(void)
53 	return lapic_read(LAPIC_ID) >> 24;
57 #if CONFIG_AP_IN_SIPI_WAIT != 1
58 /* If we need to go back to sipi wait, we use the long non-inlined version of
59  * this function in lapic_cpu_init.c
/* Inline variant: permanently halt this AP (it does not return to the
 * SIPI-wait state). */
61 static inline __attribute__((always_inline)) void stop_this_cpu(void)
63 	/* Called by an AP when it is ready to halt and wait for a new task */
/* Out-of-line variant, used when APs must return to SIPI wait;
 * defined in lapic_cpu_init.c (see comment above). */
69 void stop_this_cpu(void);
72 #if !defined(__PRE_RAM__)
/* Type-generic atomic exchange: casts the result of __xchg() back to
 * the pointed-to type, dispatching on sizeof(*(ptr)). */
74 #define xchg(ptr,v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v),(ptr),sizeof(*(ptr))))
/* Oversized dummy type used only to give the "m" asm constraint a
 * definite object type for *ptr without a real dereference. */
76 struct __xchg_dummy { unsigned long a[100]; };
77 #define __xg(x) ((struct __xchg_dummy *)(x))
80 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway
81 * Note 2: xchg has side effect, so that attribute volatile is necessary,
82 * but generally the primitive is invalid, *ptr is output argument. --ANK
/* Atomically exchange `x` with the 1-, 2- or 4-byte object at `ptr`;
 * `x` is the in/out operand (constraint "0"), so the previous memory
 * value comes back through it.  x86 `xchg` with a memory operand
 * implies a locked cycle, hence no explicit "lock" prefix (see the
 * comment above). */
84 static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
			/* 1-byte exchange */
88 			__asm__ __volatile__("xchgb %b0,%1"
90 				:"m" (*__xg(ptr)), "0" (x)
			/* 2-byte exchange */
94 			__asm__ __volatile__("xchgw %w0,%1"
96 				:"m" (*__xg(ptr)), "0" (x)
			/* 4-byte exchange */
100 			__asm__ __volatile__("xchgl %0,%1"
102 				:"m" (*__xg(ptr)), "0" (x)
/* Write a local APIC register via a locked xchg instead of a plain
 * store; the old register value returned by xchg is discarded.  Used
 * by lapic_write_around() when FORCE_READ_AROUND_WRITE is set. */
109 static inline void lapic_write_atomic(unsigned long reg, unsigned long v)
111 	(void)xchg((volatile unsigned long *)(LAPIC_DEFAULT_BASE+reg), v);
/* This branch: plain MMIO writes suffice for lapic_write_around().
 * (The #if selecting between the two branches is just above;
 * presumably it keys on the APIC model — confirm against full file.) */
116 # define FORCE_READ_AROUND_WRITE 0
117 # define lapic_read_around(x) lapic_read(x)
118 # define lapic_write_around(x,y) lapic_write((x),(y))
/* Fallback branch: route writes through the locked xchg-based
 * lapic_write_atomic(). */
120 # define FORCE_READ_AROUND_WRITE 1
121 # define lapic_read_around(x) lapic_read(x)
122 # define lapic_write_around(x,y) lapic_write_atomic((x),(y))
/* Read register `reg` of another CPU's local APIC by sending a
 * remote-read (REMRD) IPI to `apicid`.  On success the value lands in
 * *pvalue.  `reg` is a byte offset; the ICR encodes it in 16-byte
 * units, hence the >> 4.  (Exact return codes are set on lines elided
 * from this view — presumably 0 on success, nonzero on timeout.) */
125 static inline int lapic_remote_read(int apicid, int reg, unsigned long *pvalue)
128 	unsigned long status;
	/* Wait out any in-flight IPI, then target apicid and issue REMRD. */
130 	lapic_wait_icr_idle();
131 	lapic_write_around(LAPIC_ICR2, SET_LAPIC_DEST_FIELD(apicid));
132 	lapic_write_around(LAPIC_ICR, LAPIC_DM_REMRD | (reg >> 4));
	/* Poll the remote-read status field, bounded to ~1000 iterations. */
138 		status = lapic_read(LAPIC_ICR) & LAPIC_ICR_RR_MASK;
139 	} while (status == LAPIC_ICR_RR_INPROG && timeout++ < 1000);
	/* The remote-read register holds valid data only on RR_VALID. */
142 	if (status == LAPIC_ICR_RR_VALID) {
143 		*pvalue = lapic_read(LAPIC_RRR);
/* Configure this CPU's local APIC (out-of-line implementation). */
150 void setup_lapic(void);
/* Bring up the given secondary CPU.  NOTE(review): return-value
 * convention (success vs. failure) not visible here — confirm against
 * the definition in lapic_cpu_init.c. */
154 int start_cpu(struct device *cpu);
155 #endif /* CONFIG_SMP */
157 #endif /* !__PRE_RAM__ */
159 #endif /* CPU_X86_LAPIC_H */