/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04 by Ralf Baechle
 */

/*
 * As a workaround for the ATOMIC_DEC_AND_LOCK / atomic_dec_and_lock mess in
 * <linux/spinlock.h> we have to include <linux/spinlock.h> outside the
 * main big wrapper ...
 */
#include <linux/config.h>
#include <linux/spinlock.h>

#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <asm/cpu-features.h>
#include <asm/war.h>

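/*
 * Implementation note: each operation below is provided in three
 * variants selected at run time.  CPUs with LL/SC use an ll/sc (or
 * lld/scd) retry loop; CPUs needing the R10000_LLSC_WAR erratum
 * workaround use the same loop written with the branch-likely form
 * (beqzl); all other CPUs fall back to the global atomic_lock spinlock
 * declared below, taken with interrupts disabled.
 */
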
extern spinlock_t atomic_lock;

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)          ((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)         ((v)->counter = (i))

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_add            \n"
                "       addu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_add            \n"
                "       addu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                v->counter += i;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }
}

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_sub            \n"
                "       subu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_sub            \n"
                "       subu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                v->counter -= i;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }
}

/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                result = v->counter;
                result += i;
                v->counter = result;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }

        return result;
}

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }

        return result;
}

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if the result is greater than or
 * equal to zero.  The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
        int result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }

        return result;
}

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))

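/*
 * Illustrative use of atomic_cmpxchg() (not part of this header): a
 * sketch of an "add @a to @v unless @v equals @u" helper built on a
 * compare-and-swap retry loop.  The helper name and exact semantics
 * below are assumptions made for the example only.  A return value
 * from atomic_cmpxchg() other than the old value means another CPU
 * updated the counter in the meantime, so the loop simply retries.
 *
 *	static __inline__ int example_add_unless(atomic_t *v, int a, int u)
 *	{
 *		int old;
 *
 *		for (;;) {
 *			old = atomic_read(v);
 *			if (old == u)
 *				return 0;
 *			if (atomic_cmpxchg(v, old, old + a) == old)
 *				return 1;
 *		}
 *	}
 */
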
#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)       atomic_sub_if_positive(1, v)

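/*
 * Illustrative use of atomic_dec_if_positive() (not part of this
 * header): draining a budget counter without letting it drop below
 * zero.  The counter and function names are assumptions made for the
 * example only.  A negative return value means the counter was already
 * zero (or negative) and was left untouched.
 *
 *	static atomic_t example_budget = ATOMIC_INIT(4);
 *
 *	static __inline__ int example_take_one(void)
 *	{
 *		return atomic_dec_if_positive(&example_budget) >= 0;
 *	}
 */
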
/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)

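/*
 * Illustrative reference-counting pattern (not part of this header),
 * i.e. the "resource counting" use mentioned at the top of this file.
 * The structure and function names are assumptions made for the
 * example only; atomic_dec_and_test() returns true only for the caller
 * that drops the last reference, so exactly one caller frees the
 * object.
 *
 *	struct example_object {
 *		atomic_t refcount;
 *	};
 *
 *	static __inline__ void example_get(struct example_object *obj)
 *	{
 *		atomic_inc(&obj->refcount);
 *	}
 *
 *	static __inline__ void example_put(struct example_object *obj)
 *	{
 *		if (atomic_dec_and_test(&obj->refcount))
 *			kfree(obj);
 *	}
 */
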
#ifdef CONFIG_64BIT

typedef struct { volatile __s64 counter; } atomic64_t;

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)        ((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v,i)       ((v)->counter = (i))

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_add          \n"
                "       daddu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_add          \n"
                "       daddu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                v->counter += i;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_sub          \n"
                "       dsubu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_sub          \n"
                "       dsubu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                v->counter -= i;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }
}

/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       daddu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       daddu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                result = v->counter;
                result += i;
                v->counter = result;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }

        return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }

        return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if the result is greater than or
 * equal to zero.  The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
        long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }

        return result;
}

#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)     atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing but not the non-*_return
 * versions.
 */
#define smp_mb__before_atomic_dec()     smp_mb()
#define smp_mb__after_atomic_dec()      smp_mb()
#define smp_mb__before_atomic_inc()     smp_mb()
#define smp_mb__after_atomic_inc()      smp_mb()

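/*
 * Illustrative use of the barrier macros above (not part of this
 * header): the plain atomic_dec()/atomic_inc() are not serializing, so
 * stores that must be visible to other CPUs before the decrement need
 * an explicit barrier.  The flag and counter names are assumptions made
 * for the example only.
 *
 *	example_data_ready = 1;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&example_pending);
 */
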
#endif /* _ASM_ATOMIC_H */