atomic.h
Uploaded by: lgb322
Upload date: 2013-02-24
Package size: 30529k
File size: 5k
Source category: Embedded Linux
Development platform: Unix_Linux

#ifndef __ARCH_S390_ATOMIC__
#define __ARCH_S390_ATOMIC__

/*
 *  include/asm-s390/atomic.h
 *
 *  S390 version
 *    Copyright (C) 1999,2000 IBM Deutschland Entwicklung GmbH, IBM Corporation
 *    Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com),
 *               Denis Joseph Barrow
 *
 *  Derived from "include/asm-i386/bitops.h"
 *    Copyright (C) 1992, Linus Torvalds
 *
 */

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 * S390 uses 'Compare And Swap' for atomicity in an SMP environment.
 */

typedef struct { volatile int counter; } __attribute__ ((aligned (4))) atomic_t;
#define ATOMIC_INIT(i)  { (i) }

#define atomic_eieio()          __asm__ __volatile__ ("BCR 15,0")

#define __CS_LOOP(old_val, new_val, ptr, op_val, op_string)            \
        __asm__ __volatile__("   l     %0,0(%2)\n"                     \
                             "0: lr    %1,%0\n"                        \
                             op_string "  %1,%3\n"                     \
                             "   cs    %0,%1,0(%2)\n"                  \
                             "   jl    0b"                             \
                             : "=&d" (old_val), "=&d" (new_val)        \
                             : "a" (ptr), "d" (op_val) : "cc" );
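/*
 * Annotation of the __CS_LOOP retry pattern above:
 *   l   - load the current counter value into old_val
 *   lr  - copy it into the work register new_val
 *   ar/sr/nr/or - apply the requested operation to the copy
 *   cs  - compare and swap: store new_val only if the counter in
 *         memory still equals old_val
 *   jl  - if the compare failed (another CPU changed the counter),
 *         old_val now holds the fresh value; branch back and retry
 */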
static __inline__ int atomic_read(atomic_t *v)
{
        int retval;
        __asm__ __volatile__("bcr      15,0\n\t"
                             "l        %0,%1"
                             : "=d" (retval) : "m" (*v) );
        return retval;
}

static __inline__ void atomic_set(atomic_t *v, int i)
{
        __asm__ __volatile__("st  %1,%0\n\t"
                             "bcr 15,0"
                             : "=m" (*v) : "d" (i) );
}
static __inline__ void atomic_add(int i, atomic_t *v)
{
        int old_val, new_val;
        __CS_LOOP(old_val, new_val, v, i, "ar");
}

static __inline__ int atomic_add_return(int i, atomic_t *v)
{
        int old_val, new_val;
        __CS_LOOP(old_val, new_val, v, i, "ar");
        return new_val;
}

static __inline__ int atomic_add_negative(int i, atomic_t *v)
{
        int old_val, new_val;
        __CS_LOOP(old_val, new_val, v, i, "ar");
        return new_val < 0;
}

static __inline__ void atomic_sub(int i, atomic_t *v)
{
        int old_val, new_val;
        __CS_LOOP(old_val, new_val, v, i, "sr");
}

static __inline__ void atomic_inc(volatile atomic_t *v)
{
        int old_val, new_val;
        __CS_LOOP(old_val, new_val, v, 1, "ar");
}

static __inline__ int atomic_inc_return(volatile atomic_t *v)
{
        int old_val, new_val;
        __CS_LOOP(old_val, new_val, v, 1, "ar");
        return new_val;
}

static __inline__ int atomic_inc_and_test(volatile atomic_t *v)
{
        int old_val, new_val;
        __CS_LOOP(old_val, new_val, v, 1, "ar");
        return new_val != 0;
}

static __inline__ void atomic_dec(volatile atomic_t *v)
{
        int old_val, new_val;
        __CS_LOOP(old_val, new_val, v, 1, "sr");
}

static __inline__ int atomic_dec_return(volatile atomic_t *v)
{
        int old_val, new_val;
        __CS_LOOP(old_val, new_val, v, 1, "sr");
        return new_val;
}

static __inline__ int atomic_dec_and_test(volatile atomic_t *v)
{
        int old_val, new_val;
        __CS_LOOP(old_val, new_val, v, 1, "sr");
        return new_val == 0;
}

static __inline__ void atomic_clear_mask(unsigned long mask, atomic_t *v)
{
        int old_val, new_val;
        __CS_LOOP(old_val, new_val, v, ~mask, "nr");
}

static __inline__ void atomic_set_mask(unsigned long mask, atomic_t *v)
{
        int old_val, new_val;
        __CS_LOOP(old_val, new_val, v, mask, "or");
}
/*
  returns 0 if expected_oldval == value in *v (swap was successful),
  returns 1 if unsuccessful.
*/
static __inline__ int
atomic_compare_and_swap(int expected_oldval, int new_val, atomic_t *v)
{
        int retval;

        __asm__ __volatile__(
                "  lr   0,%2\n"
                "  cs   0,%3,0(%1)\n"
                "  ipm  %0\n"
                "  srl  %0,28\n"
                "0:"
                : "=&d" (retval)
                : "a" (v), "d" (expected_oldval), "d" (new_val)
                : "0", "cc");
        return retval;
}
/*
  Spin until *v == expected_oldval, then swap in new_val.
 */
static __inline__ void
atomic_compare_and_swap_spin(int expected_oldval, int new_val, atomic_t *v)
{
        __asm__ __volatile__(
                "0: lr  0,%1\n"
                "   cs  0,%2,0(%0)\n"
                "   jl  0b\n"
                : : "a" (v), "d" (expected_oldval), "d" (new_val)
                : "cc", "0" );
}
#define atomic_compare_and_swap_debug(where,from,to)                    \
if (atomic_compare_and_swap ((from), (to), (where))) {                  \
        printk (KERN_WARNING "%s/%d atomic counter:%s couldn't be changed from %d(%s) to %d(%s), was %d\n", \
                __FILE__, __LINE__, #where, (from), #from, (to), #to, atomic_read (where)); \
        atomic_set(where, (to));                                         \
}
#define smp_mb__before_atomic_dec()     smp_mb()
#define smp_mb__after_atomic_dec()      smp_mb()
#define smp_mb__before_atomic_inc()     smp_mb()
#define smp_mb__after_atomic_inc()      smp_mb()

#endif                                 /* __ARCH_S390_ATOMIC__ */
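For readers who want to see the same retry pattern outside of assembly, the sketch below is a rough userspace analogue of __CS_LOOP written with the GCC/Clang __atomic builtins rather than the kernel's inline assembly. The names my_atomic_t and my_atomic_add_return are invented for the illustration and are not part of this header.

/* Userspace sketch of the compare-and-swap retry loop (illustrative only). */
#include <stdio.h>

typedef struct { volatile int counter; } my_atomic_t;

static int my_atomic_add_return(int i, my_atomic_t *v)
{
        int old_val = __atomic_load_n(&v->counter, __ATOMIC_RELAXED);
        int new_val;

        do {
                new_val = old_val + i;                  /* the "ar" step */
        } while (!__atomic_compare_exchange_n(&v->counter, &old_val, new_val,
                                              0, __ATOMIC_SEQ_CST,
                                              __ATOMIC_RELAXED));
        /* like "cs"/"jl": on failure old_val is refreshed and the loop retries */
        return new_val;
}

int main(void)
{
        my_atomic_t v = { 40 };

        printf("%d\n", my_atomic_add_return(2, &v));    /* prints 42 */
        return 0;
}

The design point is the same as in the header above: the operation is computed on a private copy of the counter, and compare-and-swap publishes the result only if the counter has not changed in the meantime; otherwise the loop retries with the freshly observed value.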