1 #ifndef CPU_X86_LAPIC_H
2 #define CPU_X86_LAPIC_H
4 #include <cpu/x86/lapic_def.h>
5 #include <cpu/x86/msr.h>
8 /* See if I need to initialize the local apic */
9 #if CONFIG_SMP || CONFIG_IOAPIC
13 static inline unsigned long lapic_read(unsigned long reg)
15 return *((volatile unsigned long *)(LAPIC_DEFAULT_BASE+reg));
18 static inline void lapic_write(unsigned long reg, unsigned long v)
20 *((volatile unsigned long *)(LAPIC_DEFAULT_BASE+reg)) = v;
23 static inline void lapic_wait_icr_idle(void)
25 do { } while ( lapic_read( LAPIC_ICR ) & LAPIC_ICR_BUSY );
30 static inline void enable_lapic(void)
34 msr = rdmsr(LAPIC_BASE_MSR);
37 msr.lo |= LAPIC_DEFAULT_BASE | (1 << 11);
38 wrmsr(LAPIC_BASE_MSR, msr);
41 static inline void disable_lapic(void)
44 msr = rdmsr(LAPIC_BASE_MSR);
46 wrmsr(LAPIC_BASE_MSR, msr);
49 static inline unsigned long lapicid(void)
51 return lapic_read(LAPIC_ID) >> 24;
54 static inline void stop_this_cpu(void)
59 /* Send an APIC INIT to myself */
60 lapic_write(LAPIC_ICR2, SET_LAPIC_DEST_FIELD(apicid));
61 lapic_write(LAPIC_ICR, LAPIC_INT_LEVELTRIG | LAPIC_INT_ASSERT | LAPIC_DM_INIT);
62 /* Wait for the ipi send to finish */
63 lapic_wait_icr_idle();
65 /* Deassert the APIC INIT */
66 lapic_write(LAPIC_ICR2, SET_LAPIC_DEST_FIELD(apicid));
67 lapic_write(LAPIC_ICR, LAPIC_INT_LEVELTRIG | LAPIC_DM_INIT);
68 /* Wait for the ipi send to finish */
69 lapic_wait_icr_idle();
71 /* If I haven't halted spin forever */
77 #if ! defined (__ROMCC__)
/* Type-generic atomic exchange: stores v into *ptr and returns the old
 * value, dispatching on sizeof(*ptr) via __xchg(). */
#define xchg(ptr,v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v),(ptr),sizeof(*(ptr))))
/* Oversized dummy type used only inside asm memory constraints: casting a
 * pointer to struct __xchg_dummy * tells GCC the asm may touch a large
 * region at that address, preventing invalid caching of *ptr. */
struct __xchg_dummy { unsigned long a[100]; };
#define __xg(x) ((struct __xchg_dummy *)(x))
/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway
 * Note 2: xchg has side effect, so that attribute volatile is necessary,
 * but generally the primitive is invalid, *ptr is output argument. --ANK
 */
89 static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
93 __asm__ __volatile__("xchgb %b0,%1"
95 :"m" (*__xg(ptr)), "0" (x)
99 __asm__ __volatile__("xchgw %w0,%1"
101 :"m" (*__xg(ptr)), "0" (x)
105 __asm__ __volatile__("xchgl %0,%1"
107 :"m" (*__xg(ptr)), "0" (x)
115 extern inline void lapic_write_atomic(unsigned long reg, unsigned long v)
117 xchg((volatile unsigned long *)(LAPIC_DEFAULT_BASE+reg), v);
/* On "good" APICs a plain write suffices; buggy APICs need the atomic
 * (xchg-based) write to work around lost-write errata. */
#ifdef CONFIG_X86_GOOD_APIC
# define FORCE_READ_AROUND_WRITE 0
# define lapic_read_around(x) lapic_read(x)
# define lapic_write_around(x,y) lapic_write((x),(y))
#else
# define FORCE_READ_AROUND_WRITE 1
# define lapic_read_around(x) lapic_read(x)
# define lapic_write_around(x,y) lapic_write_atomic((x),(y))
#endif
131 static inline int lapic_remote_read(int apicid, int reg, unsigned long *pvalue)
134 unsigned long status;
136 lapic_wait_icr_idle();
137 lapic_write_around(LAPIC_ICR2, SET_LAPIC_DEST_FIELD(apicid));
138 lapic_write_around(LAPIC_ICR, LAPIC_DM_REMRD | (reg >> 4));
144 status = lapic_read(LAPIC_ICR) & LAPIC_ICR_RR_MASK;
145 } while (status == LAPIC_ICR_RR_INPROG && timeout++ < 1000);
148 if (status == LAPIC_ICR_RR_VALID) {
149 *pvalue = lapic_read(LAPIC_RRR);
/* Initialize and enable this CPU's local APIC (implemented elsewhere). */
void setup_lapic(void);
/* NOTE(review): presumably boots the given secondary CPU via INIT/SIPI and
 * returns nonzero on success — confirm against the implementation. */
int start_cpu(struct device *cpu);
163 #endif /* CONFIG_SMP */
166 #endif /* !__ROMCC__ */
168 #endif /* CPU_X86_LAPIC_H */