# define _ASM_PTR " .long "
# define _ASM_ALIGN " .balign 4 "
+# define _ASM_MOV_UL " movl "
#else
/* 64 bits */
# define _ASM_PTR " .quad "
# define _ASM_ALIGN " .balign 8 "
+# define _ASM_MOV_UL " movq "
#endif /* CONFIG_X86_32 */
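
The two `+` lines above give asm.h a word-size mov to go with the existing
_ASM_PTR/_ASM_ALIGN pair, so one asm() template can emit movl on 32-bit and
movq on 64-bit. A minimal userspace sketch of the string concatenation, not
part of the patch (the compiler's __i386__ test stands in for CONFIG_X86_32,
which only exists under kbuild):

/* Sketch only: shows what the _ASM_* helpers concatenate to.
 * Build/run: gcc -m32 demo.c && ./a.out   (or -m64) */
#include <stdio.h>

#ifdef __i386__
# define _ASM_PTR	" .long "
# define _ASM_ALIGN	" .balign 4 "
# define _ASM_MOV_UL	" movl "
#else
# define _ASM_PTR	" .quad "
# define _ASM_ALIGN	" .balign 8 "
# define _ASM_MOV_UL	" movq "
#endif

int main(void)
{
	/* Adjacent string literals concatenate exactly as inside asm(). */
	printf("mov template:   \"%s\"\n", _ASM_MOV_UL " $1f,%0");
	printf("ptr template:   \"%s\"\n", _ASM_PTR " %c2");
	printf("align:          \"%s\"\n", _ASM_ALIGN);
	return 0;
}
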
-#ifdef CONFIG_X86_32
-# include "resume-trace_32.h"
-#else
-# include "resume-trace_64.h"
+#ifndef _ASM_X86_RESUME_TRACE_H
+#define _ASM_X86_RESUME_TRACE_H
+
+#include <asm/asm.h>
+
+#define TRACE_RESUME(user) do { \
+ if (pm_trace_enabled) { \
+ void *tracedata; \
+ asm volatile(_ASM_MOV_UL " $1f,%0\n" \
+ ".section .tracedata,\"a\"\n" \
+ "1:\t.word %c1\n\t" \
+ _ASM_PTR " %c2\n" \
+ ".previous" \
+ :"=r" (tracedata) \
+ : "i" (__LINE__), "i" (__FILE__)); \
+ generate_resume_trace(tracedata, user); \
+ } \
+} while (0)
+
#endif
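
The unified macro keeps the old behaviour: each call site plants a packed
record (16-bit line number, then a pointer to the file name) in the
.tracedata section and passes its address to generate_resume_trace(), which
hashes it into the RTC so the last device touched before a resume hang can
be recovered. A self-contained userspace sketch of that mechanism, not part
of the patch; pm_trace_enabled and generate_resume_trace() are local
stand-ins for the kernel symbols, and the decode mirrors how
drivers/base/power/trace.c reads these records:

/* Userspace model of one TRACE_RESUME() expansion. x86-64 only as
 * written; build with: gcc -O2 -fno-pie -no-pie sketch.c
 * (movq $1f,%0 needs an absolute relocation, so PIE is out). */
#include <stdio.h>

static int pm_trace_enabled = 1;

/* Decode one record: a .word line number followed by an unaligned
 * pointer to the file name, exactly as the asm below lays it out. */
static void generate_resume_trace(const void *tracedata, unsigned int user)
{
	unsigned short lineno = *(const unsigned short *)tracedata;
	const char *file =
		*(const char * const *)((const char *)tracedata + 2);
	printf("record at %p: %s:%hu (user %u)\n",
	       tracedata, file, lineno, user);
}

#define TRACE_RESUME(user) do {					\
	if (pm_trace_enabled) {					\
		void *tracedata;				\
		asm volatile("movq $1f,%0\n"			\
			".section .tracedata,\"a\"\n"		\
			"1:\t.word %c1\n\t"			\
			".quad %c2\n"				\
			".previous"				\
			: "=r" (tracedata)			\
			: "i" (__LINE__), "i" (__FILE__));	\
		generate_resume_trace(tracedata, user);		\
	}							\
} while (0)

int main(void)
{
	TRACE_RESUME(0);	/* each use emits its own record */
	TRACE_RESUME(1);
	return 0;
}
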
+++ /dev/null
-#define TRACE_RESUME(user) do { \
- if (pm_trace_enabled) { \
- void *tracedata; \
- asm volatile("movl $1f,%0\n" \
- ".section .tracedata,\"a\"\n" \
- "1:\t.word %c1\n" \
- "\t.long %c2\n" \
- ".previous" \
- :"=r" (tracedata) \
- : "i" (__LINE__), "i" (__FILE__)); \
- generate_resume_trace(tracedata, user); \
- } \
-} while (0)
+++ /dev/null
-#define TRACE_RESUME(user) do { \
- if (pm_trace_enabled) { \
- void *tracedata; \
- asm volatile("movq $1f,%0\n" \
- ".section .tracedata,\"a\"\n" \
- "1:\t.word %c1\n" \
- "\t.quad %c2\n" \
- ".previous" \
- :"=r" (tracedata) \
- : "i" (__LINE__), "i" (__FILE__)); \
- generate_resume_trace(tracedata, user); \
- } \
-} while (0)
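
Both deleted variants are exactly what the shared template concatenates to,
give or take the padding spaces the _ASM_* helpers carry, which the
assembler ignores. A quick plain-C check of the 64-bit case, not part of
the patch (_ASM_* values copied from the asm.h hunk above; the 32-bit case
is identical with movl/.long):

/* Prints the unified template next to the removed 64-bit one so the
 * whitespace-only difference is visible. Build: gcc check.c */
#include <stdio.h>

#define _ASM_PTR	" .quad "
#define _ASM_MOV_UL	" movq "

int main(void)
{
	/* Template as the unified resume-trace.h builds it... */
	const char *unified =
		_ASM_MOV_UL " $1f,%0\n"
		".section .tracedata,\"a\"\n"
		"1:\t.word %c1\n\t"
		_ASM_PTR " %c2\n"
		".previous";
	/* ...and as the removed resume-trace_64.h spelled it out. */
	const char *removed =
		"movq $1f,%0\n"
		".section .tracedata,\"a\"\n"
		"1:\t.word %c1\n"
		"\t.quad %c2\n"
		".previous";
	printf("unified:\n%s\n\nremoved:\n%s\n", unified, removed);
	return 0;
}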