X-Git-Url: https://err.no/cgi-bin/gitweb.cgi?a=blobdiff_plain;f=include%2Fasm-sparc64%2Fsystem.h;h=6897ac31be4100baf733c45043294a36d4ed66bb;hb=13c48c490208d9e70d8d66d56f96c5054db69af7;hp=99a669c190c776a030ddc2be595693982a1c3fe0;hpb=95b00786f3b8fa99f53931361beeb4c10504ad87;p=linux-2.6

diff --git a/include/asm-sparc64/system.h b/include/asm-sparc64/system.h
index 99a669c190..6897ac31be 100644
--- a/include/asm-sparc64/system.h
+++ b/include/asm-sparc64/system.h
@@ -8,6 +8,7 @@ #ifndef __ASSEMBLY__
 
 #include <linux/irqflags.h>
+#include <asm-generic/cmpxchg-local.h>
 
 /*
  * Sparc (general) CPU types
  */
@@ -29,6 +30,8 @@ enum sparc_cpu {
 #define ARCH_SUN4C_SUN4 0
 #define ARCH_SUN4 0
 
+extern char reboot_command[];
+
 /* These are here in an effort to more fully work around Spitfire Errata
  * #51. Essentially, if a memory barrier occurs soon after a mispredicted
  * branch, the chip can stop executing instructions until a trap occurs.
@@ -116,6 +119,7 @@ do {	__asm__ __volatile__("ba,pt	%%xcc, 1f\n\t" \
 extern void sun_do_break(void);
 extern int stop_a_enabled;
 
+extern void fault_in_user_windows(void);
 extern void synchronize_user_stack(void);
 
 extern void __flushw_user(void);
@@ -176,12 +180,13 @@ do {	if (test_thread_flag(TIF_PERFCTR)) { \
	"ldx	[%%sp + 2047 + 0x70], %%i6\n\t" \
	"ldx	[%%sp + 2047 + 0x78], %%i7\n\t" \
	"ldx	[%%g6 + %9], %%g4\n\t" \
-	"brz,pt %%o7, 1f\n\t" \
+	"brz,pt %%o7, switch_to_pc\n\t" \
	" mov	%%g7, %0\n\t" \
	"sethi	%%hi(ret_from_syscall), %%g1\n\t" \
	"jmpl	%%g1 + %%lo(ret_from_syscall), %%g0\n\t" \
	" nop\n\t" \
-	"1:\n\t" \
+	".globl switch_to_pc\n\t" \
+	"switch_to_pc:\n\t" \
	: "=&r" (last), "=r" (current), "=r" (current_thread_info_reg), \
	  "=r" (__local_per_cpu_offset) \
	: "0" (task_thread_info(next)), \
@@ -315,6 +320,34 @@ __cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
 	(unsigned long)_n_, sizeof(*(ptr))); \
   })
 
+/*
+ * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
+ * them available.
+ */
+
+static inline unsigned long __cmpxchg_local(volatile void *ptr,
+				      unsigned long old,
+				      unsigned long new, int size)
+{
+	switch (size) {
+	case 4:
+	case 8:	return __cmpxchg(ptr, old, new, size);
+	default:
+		return __cmpxchg_local_generic(ptr, old, new, size);
+	}
+
+	return old;
+}
+
+#define cmpxchg_local(ptr, o, n)					\
+	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
+			(unsigned long)(n), sizeof(*(ptr))))
+#define cmpxchg64_local(ptr, o, n)					\
+  ({									\
+	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
+	cmpxchg_local((ptr), (o), (n));					\
+  })
+
 #endif /* !(__ASSEMBLY__) */
 
 #define arch_align_stack(x) (x)
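
Notes (illustrative sketches; not part of the patch above):

1) Spitfire Errata #51. The comment context in the @@ -29,6 +30,8 @@ hunk refers to the workaround used throughout this header: every membar is reached through an unconditional branch so that it never executes in the shadow of a mispredicted branch. The @@ -116,6 +119,7 @@ hunk header quotes the first line of that construct. A sketch of it, reconstructed from memory of the same header (verify against the full file before relying on it):

#define membar_safe(type) \
do {	__asm__ __volatile__("ba,pt	%%xcc, 1f\n\t" \
			     " membar	" type "\n" \
			     "1:\n" \
			     : : : "memory"); \
} while (0)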
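
2) switch_to_pc. In the @@ -176,12 +180,13 @@ hunk, the local label 1: at the point where a switched-in task resumes becomes the global symbol switch_to_pc. Marking it .globl exposes the resume address to code outside this asm block; a hypothetical illustration of what that enables (the helper below is invented for illustration, not taken from the patch):

/* With the label global, C code can test whether a saved program
 * counter is parked at the switch_to() resume point. */
extern void switch_to_pc(void);

static inline int pc_is_switch_to_pc(unsigned long pc)
{
	return pc == (unsigned long) switch_to_pc;
}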
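
3) cmpxchg_local()/cmpxchg64_local(). The @@ -315,6 +320,34 @@ hunk adds compare-and-exchange primitives that are only guaranteed atomic with respect to the executing CPU (interrupts included); on sparc64 the 4- and 8-byte cases simply reuse the SMP-atomic __cmpxchg(). A minimal usage sketch, assuming kernel context with preemption already disabled; the per-CPU variable and the function are hypothetical:

#include <linux/percpu.h>
#include <asm/system.h>

/* Hypothetical per-CPU event counter, only ever written on the local CPU. */
static DEFINE_PER_CPU(unsigned long, ev_count);

static void ev_count_bump(void)		/* caller holds preemption off */
{
	unsigned long *p = &__get_cpu_var(ev_count);
	unsigned long old;

	do {
		old = *p;
		/* Retry only if an interrupt on this CPU raced with us. */
	} while (cmpxchg_local(p, old, old + 1) != old);
}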
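
4) The generic fallback. For 1- and 2-byte operands, __cmpxchg_local() defers to __cmpxchg_local_generic() from the newly included <asm-generic/cmpxchg-local.h>, which obtains the "local only" semantics in portable C by masking interrupts around a plain read-compare-write. A simplified sketch of that idea (one width shown; not the verbatim generic implementation):

#include <linux/irqflags.h>
#include <linux/types.h>

/* Local (this-CPU-only) compare-and-exchange of a single byte:
 * interrupts are the only concurrency that must be excluded. */
static inline u8 cmpxchg_local_u8_sketch(volatile u8 *ptr, u8 old, u8 new)
{
	unsigned long flags;
	u8 prev;

	local_irq_save(flags);
	prev = *ptr;
	if (prev == old)
		*ptr = new;
	local_irq_restore(flags);
	return prev;	/* caller checks prev == old for success */
}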