// Source file src/sync/atomic/doc.go
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package atomic provides low-level atomic memory primitives
// useful for implementing synchronization algorithms.
//
// These functions require great care to be used correctly.
// Except for special, low-level applications, synchronization is better
// done with channels or the facilities of the [sync] package.
// Share memory by communicating;
// don't communicate by sharing memory.
//
// The swap operation, implemented by the SwapT functions, is the atomic
// equivalent of:
//
//	old = *addr
//	*addr = new
//	return old
//
// The compare-and-swap operation, implemented by the CompareAndSwapT
// functions, is the atomic equivalent of:
//
//	if *addr == old {
//		*addr = new
//		return true
//	}
//	return false
//
// The add operation, implemented by the AddT functions, is the atomic
// equivalent of:
//
//	*addr += delta
//	return *addr
//
// The load and store operations, implemented by the LoadT and StoreT
// functions, are the atomic equivalents of "return *addr" and
// "*addr = val".
//
// In the terminology of [the Go memory model], if the effect of
// an atomic operation A is observed by atomic operation B,
// then A “synchronizes before” B.
// Additionally, all the atomic operations executed in a program
// behave as though executed in some sequentially consistent order.
// This definition provides the same semantics as
// C++'s sequentially consistent atomics and Java's volatile variables.
//
// [the Go memory model]: https://go.dev/ref/mem
package atomic

import (
	"unsafe"
)

// BUG(rsc): On 386, the 64-bit functions use instructions unavailable before the Pentium MMX.
//
// On non-Linux ARM, the 64-bit functions use instructions unavailable before the ARMv6k core.
//
// On ARM, 386, and 32-bit MIPS, it is the caller's responsibility to arrange
// for 64-bit alignment of 64-bit words accessed atomically via the primitive
// atomic functions (types [Int64] and [Uint64] are automatically aligned).
// The first word in an allocated struct, array, or slice; in a global
// variable; or in a local variable (because on 32-bit architectures, the
// subject of 64-bit atomic operations will escape to the heap) can be
// relied upon to be 64-bit aligned.

// SwapInt32 atomically stores new into *addr and returns the previous *addr value.
// Consider using the more ergonomic and less error-prone [Int32.Swap] instead.
//
//go:noescape
func SwapInt32(addr *int32, new int32) (old int32)

// SwapUint32 atomically stores new into *addr and returns the previous *addr value.
// Consider using the more ergonomic and less error-prone [Uint32.Swap] instead.
//
//go:noescape
func SwapUint32(addr *uint32, new uint32) (old uint32)

// SwapUintptr atomically stores new into *addr and returns the previous *addr value.
// Consider using the more ergonomic and less error-prone [Uintptr.Swap] instead.
//
//go:noescape
func SwapUintptr(addr *uintptr, new uintptr) (old uintptr)

// SwapPointer atomically stores new into *addr and returns the previous *addr value.
// Consider using the more ergonomic and less error-prone [Pointer.Swap] instead.
func SwapPointer(addr *unsafe.Pointer, new unsafe.Pointer) (old unsafe.Pointer)

// CompareAndSwapInt32 executes the compare-and-swap operation for an int32 value.
// Consider using the more ergonomic and less error-prone [Int32.CompareAndSwap] instead.
//
//go:noescape
func CompareAndSwapInt32(addr *int32, old, new int32) (swapped bool)

// CompareAndSwapUint32 executes the compare-and-swap operation for a uint32 value.
// Consider using the more ergonomic and less error-prone [Uint32.CompareAndSwap] instead.
//
//go:noescape
func CompareAndSwapUint32(addr *uint32, old, new uint32) (swapped bool)

// CompareAndSwapUintptr executes the compare-and-swap operation for a uintptr value.
// Consider using the more ergonomic and less error-prone [Uintptr.CompareAndSwap] instead.
//
//go:noescape
func CompareAndSwapUintptr(addr *uintptr, old, new uintptr) (swapped bool)

// CompareAndSwapPointer executes the compare-and-swap operation for an unsafe.Pointer value.
// Consider using the more ergonomic and less error-prone [Pointer.CompareAndSwap] instead.
func CompareAndSwapPointer(addr *unsafe.Pointer, old, new unsafe.Pointer) (swapped bool)

// AddInt32 atomically adds delta to *addr and returns the new value.
// Consider using the more ergonomic and less error-prone [Int32.Add] instead.
//
//go:noescape
func AddInt32(addr *int32, delta int32) (new int32)

// AddUint32 atomically adds delta to *addr and returns the new value.
// To subtract a signed positive constant value c from x, do AddUint32(&x, ^uint32(c-1)).
// In particular, to decrement x, do AddUint32(&x, ^uint32(0)).
// Consider using the more ergonomic and less error-prone [Uint32.Add] instead.
//
//go:noescape
func AddUint32(addr *uint32, delta uint32) (new uint32)

// AddUintptr atomically adds delta to *addr and returns the new value.
// Consider using the more ergonomic and less error-prone [Uintptr.Add] instead.
//
//go:noescape
func AddUintptr(addr *uintptr, delta uintptr) (new uintptr)

// AndInt32 atomically performs a bitwise AND operation on *addr using the bitmask provided as mask
// and returns the old value.
// Consider using the more ergonomic and less error-prone [Int32.And] instead.
//
//go:noescape
func AndInt32(addr *int32, mask int32) (old int32)

// AndUint32 atomically performs a bitwise AND operation on *addr using the bitmask provided as mask
// and returns the old value.
// Consider using the more ergonomic and less error-prone [Uint32.And] instead.
//
//go:noescape
func AndUint32(addr *uint32, mask uint32) (old uint32)

// AndUintptr atomically performs a bitwise AND operation on *addr using the bitmask provided as mask
// and returns the old value.
// Consider using the more ergonomic and less error-prone [Uintptr.And] instead.
//
//go:noescape
func AndUintptr(addr *uintptr, mask uintptr) (old uintptr)

// OrInt32 atomically performs a bitwise OR operation on *addr using the bitmask provided as mask
// and returns the old value.
// Consider using the more ergonomic and less error-prone [Int32.Or] instead.
//
//go:noescape
func OrInt32(addr *int32, mask int32) (old int32)

// OrUint32 atomically performs a bitwise OR operation on *addr using the bitmask provided as mask
// and returns the old value.
// Consider using the more ergonomic and less error-prone [Uint32.Or] instead.
//
//go:noescape
func OrUint32(addr *uint32, mask uint32) (old uint32)

// OrUintptr atomically performs a bitwise OR operation on *addr using the bitmask provided as mask
// and returns the old value.
// Consider using the more ergonomic and less error-prone [Uintptr.Or] instead.
//
//go:noescape
func OrUintptr(addr *uintptr, mask uintptr) (old uintptr)

// LoadInt32 atomically loads *addr.
// Consider using the more ergonomic and less error-prone [Int32.Load] instead.
//
//go:noescape
func LoadInt32(addr *int32) (val int32)

// LoadUint32 atomically loads *addr.
// Consider using the more ergonomic and less error-prone [Uint32.Load] instead.
//
//go:noescape
func LoadUint32(addr *uint32) (val uint32)

// LoadUintptr atomically loads *addr.
// Consider using the more ergonomic and less error-prone [Uintptr.Load] instead.
//
//go:noescape
func LoadUintptr(addr *uintptr) (val uintptr)

// LoadPointer atomically loads *addr.
// Consider using the more ergonomic and less error-prone [Pointer.Load] instead.
func LoadPointer(addr *unsafe.Pointer) (val unsafe.Pointer)

// StoreInt32 atomically stores val into *addr.
// Consider using the more ergonomic and less error-prone [Int32.Store] instead.
//
//go:noescape
func StoreInt32(addr *int32, val int32)

// StoreUint32 atomically stores val into *addr.
// Consider using the more ergonomic and less error-prone [Uint32.Store] instead.
//
//go:noescape
func StoreUint32(addr *uint32, val uint32)

// StoreUintptr atomically stores val into *addr.
// Consider using the more ergonomic and less error-prone [Uintptr.Store] instead.
//
//go:noescape
func StoreUintptr(addr *uintptr, val uintptr)

// StorePointer atomically stores val into *addr.
// Consider using the more ergonomic and less error-prone [Pointer.Store] instead.
func StorePointer(addr *unsafe.Pointer, val unsafe.Pointer)