1 #ifndef CPU_X86_LAPIC_H
2 #define CPU_X86_LAPIC_H
4 #include <cpu/x86/lapic_def.h>
5 #include <cpu/x86/msr.h>
8 /* See if I need to initialize the local apic */
9 #if CONFIG_SMP || CONFIG_IOAPIC
13 static inline __attribute__((always_inline)) unsigned long lapic_read(unsigned long reg)
15 return *((volatile unsigned long *)(LAPIC_DEFAULT_BASE+reg));
18 static inline __attribute__((always_inline)) void lapic_write(unsigned long reg, unsigned long v)
20 *((volatile unsigned long *)(LAPIC_DEFAULT_BASE+reg)) = v;
23 static inline __attribute__((always_inline)) void lapic_wait_icr_idle(void)
25 do { } while ( lapic_read( LAPIC_ICR ) & LAPIC_ICR_BUSY );
30 static inline void enable_lapic(void)
34 msr = rdmsr(LAPIC_BASE_MSR);
37 msr.lo |= LAPIC_DEFAULT_BASE | (1 << 11);
38 wrmsr(LAPIC_BASE_MSR, msr);
41 static inline void disable_lapic(void)
44 msr = rdmsr(LAPIC_BASE_MSR);
46 wrmsr(LAPIC_BASE_MSR, msr);
49 static inline __attribute__((always_inline)) unsigned long lapicid(void)
51 return lapic_read(LAPIC_ID) >> 24;
54 static inline __attribute__((always_inline)) void stop_this_cpu(void)
60 /* Send an APIC INIT to myself */
61 lapic_write(LAPIC_ICR2, SET_LAPIC_DEST_FIELD(apicid));
62 lapic_write(LAPIC_ICR, LAPIC_INT_LEVELTRIG | LAPIC_INT_ASSERT | LAPIC_DM_INIT);
63 /* Wait for the ipi send to finish */
64 lapic_wait_icr_idle();
66 /* Deassert the APIC INIT */
67 lapic_write(LAPIC_ICR2, SET_LAPIC_DEST_FIELD(apicid));
68 lapic_write(LAPIC_ICR, LAPIC_INT_LEVELTRIG | LAPIC_DM_INIT);
69 /* Wait for the ipi send to finish */
70 lapic_wait_icr_idle();
72 /* If I haven't halted spin forever */
78 #if ! defined (__ROMCC__)
/*
 * Atomically exchange *ptr with v; expands to __xchg() with the result
 * cast back to the pointee type.  Size dispatch happens in __xchg().
 */
#define xchg(ptr,v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v),(ptr),sizeof(*(ptr))))

/*
 * Oversized dummy type used in "m" constraints so the compiler assumes
 * the whole pointed-to object may be read/written, not just one word.
 */
struct __xchg_dummy { unsigned long a[100]; };
#define __xg(x) ((struct __xchg_dummy *)(x))
/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
 * Note 2: xchg has side effect, so that attribute volatile is necessary,
 * but generally the primitive is invalid, *ptr is output argument. --ANK
 */
90 static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
94 __asm__ __volatile__("xchgb %b0,%1"
96 :"m" (*__xg(ptr)), "0" (x)
100 __asm__ __volatile__("xchgw %w0,%1"
102 :"m" (*__xg(ptr)), "0" (x)
106 __asm__ __volatile__("xchgl %0,%1"
108 :"m" (*__xg(ptr)), "0" (x)
116 extern inline void lapic_write_atomic(unsigned long reg, unsigned long v)
118 xchg((volatile unsigned long *)(LAPIC_DEFAULT_BASE+reg), v);
#ifdef CONFIG_X86_GOOD_APIC
/* Integrated (well-behaved) APIC: plain writes are safe. */
# define FORCE_READ_AROUND_WRITE 0
# define lapic_read_around(x) lapic_read(x)
# define lapic_write_around(x,y) lapic_write((x),(y))
#else
/* Buggy APIC: serialize writes with an atomic xchg-based access. */
# define FORCE_READ_AROUND_WRITE 1
# define lapic_read_around(x) lapic_read(x)
# define lapic_write_around(x,y) lapic_write_atomic((x),(y))
#endif
132 static inline int lapic_remote_read(int apicid, int reg, unsigned long *pvalue)
135 unsigned long status;
137 lapic_wait_icr_idle();
138 lapic_write_around(LAPIC_ICR2, SET_LAPIC_DEST_FIELD(apicid));
139 lapic_write_around(LAPIC_ICR, LAPIC_DM_REMRD | (reg >> 4));
145 status = lapic_read(LAPIC_ICR) & LAPIC_ICR_RR_MASK;
146 } while (status == LAPIC_ICR_RR_INPROG && timeout++ < 1000);
149 if (status == LAPIC_ICR_RR_VALID) {
150 *pvalue = lapic_read(LAPIC_RRR);
/* Initialize the calling CPU's local APIC; defined out of line. */
void setup_lapic(void);

/*
 * Bring up the given AP.
 * NOTE(review): return convention not visible in this header — confirm
 * against the definition before relying on the value.
 */
int start_cpu(struct device *cpu);
164 #endif /* CONFIG_SMP */
167 #endif /* !__ROMCC__ */
169 #endif /* CPU_X86_LAPIC_H */