Source File
types.go
Belonging Package
internal/runtime/atomic
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package atomic
import "unsafe"
// Int32 is an atomically accessed int32 value.
//
// An Int32 must not be copied.
type Int32 struct {
	noCopy noCopy // trips go vet's copylocks check on accidental copies
	value  int32
}
// Load accesses and returns the value atomically.
//
//go:nosplit
func ( *Int32) () int32 {
return Loadint32(&.value)
}
// Store updates the value atomically.
//
//go:nosplit
func ( *Int32) ( int32) {
Storeint32(&.value, )
}
// CompareAndSwap atomically compares i's value with old,
// and if they're equal, swaps i's value with new.
// It reports whether the swap ran.
//
//go:nosplit
func ( *Int32) (, int32) bool {
return Casint32(&.value, , )
}
// Swap replaces i's value with new, returning
// i's value before the replacement.
//
//go:nosplit
func ( *Int32) ( int32) int32 {
return Xchgint32(&.value, )
}
// Add adds delta to i atomically, returning
// the new updated value.
//
// This operation wraps around in the usual
// two's-complement way.
//
//go:nosplit
func ( *Int32) ( int32) int32 {
return Xaddint32(&.value, )
}
// Int64 is an atomically accessed int64 value.
//
// 8-byte aligned on all platforms, unlike a regular int64.
//
// An Int64 must not be copied.
type Int64 struct {
	noCopy noCopy  // trips go vet's copylocks check on accidental copies
	_      align64 // compiler-recognized marker forcing 8-byte alignment
	value  int64
}
// Load accesses and returns the value atomically.
//
//go:nosplit
func ( *Int64) () int64 {
return Loadint64(&.value)
}
// Store updates the value atomically.
//
//go:nosplit
func ( *Int64) ( int64) {
Storeint64(&.value, )
}
// CompareAndSwap atomically compares i's value with old,
// and if they're equal, swaps i's value with new.
// It reports whether the swap ran.
//
//go:nosplit
func ( *Int64) (, int64) bool {
return Casint64(&.value, , )
}
// Swap replaces i's value with new, returning
// i's value before the replacement.
//
//go:nosplit
func ( *Int64) ( int64) int64 {
return Xchgint64(&.value, )
}
// Add adds delta to i atomically, returning
// the new updated value.
//
// This operation wraps around in the usual
// two's-complement way.
//
//go:nosplit
func ( *Int64) ( int64) int64 {
return Xaddint64(&.value, )
}
// Uint8 is an atomically accessed uint8 value.
//
// A Uint8 must not be copied.
type Uint8 struct {
	noCopy noCopy // trips go vet's copylocks check on accidental copies
	value  uint8
}
// Load accesses and returns the value atomically.
//
//go:nosplit
func ( *Uint8) () uint8 {
return Load8(&.value)
}
// Store updates the value atomically.
//
//go:nosplit
func ( *Uint8) ( uint8) {
Store8(&.value, )
}
// And takes value and performs a bit-wise
// "and" operation with the value of u, storing
// the result into u.
//
// The full process is performed atomically.
//
//go:nosplit
func ( *Uint8) ( uint8) {
And8(&.value, )
}
// Or takes value and performs a bit-wise
// "or" operation with the value of u, storing
// the result into u.
//
// The full process is performed atomically.
//
//go:nosplit
func ( *Uint8) ( uint8) {
Or8(&.value, )
}
// Bool is an atomically accessed bool value.
//
// A Bool must not be copied.
type Bool struct {
	// Inherits noCopy from Uint8.
	u Uint8
}
// Load accesses and returns the value atomically.
//
//go:nosplit
func ( *Bool) () bool {
return .u.Load() != 0
}
// Store updates the value atomically.
//
//go:nosplit
func ( *Bool) ( bool) {
:= uint8(0)
if {
= 1
}
.u.Store()
}
// Uint32 is an atomically accessed uint32 value.
//
// A Uint32 must not be copied.
type Uint32 struct {
	noCopy noCopy // trips go vet's copylocks check on accidental copies
	value  uint32
}
// Load accesses and returns the value atomically.
//
//go:nosplit
func ( *Uint32) () uint32 {
return Load(&.value)
}
// LoadAcquire is a partially unsynchronized version
// of Load that relaxes ordering constraints. Other threads
// may observe operations that precede this operation to
// occur after it, but no operation that occurs after it
// on this thread can be observed to occur before it.
//
// WARNING: Use sparingly and with great care.
//
//go:nosplit
func ( *Uint32) () uint32 {
return LoadAcq(&.value)
}
// Store updates the value atomically.
//
//go:nosplit
func ( *Uint32) ( uint32) {
Store(&.value, )
}
// StoreRelease is a partially unsynchronized version
// of Store that relaxes ordering constraints. Other threads
// may observe operations that occur after this operation to
// precede it, but no operation that precedes it
// on this thread can be observed to occur after it.
//
// WARNING: Use sparingly and with great care.
//
//go:nosplit
func ( *Uint32) ( uint32) {
StoreRel(&.value, )
}
// CompareAndSwap atomically compares u's value with old,
// and if they're equal, swaps u's value with new.
// It reports whether the swap ran.
//
//go:nosplit
func ( *Uint32) (, uint32) bool {
return Cas(&.value, , )
}
// CompareAndSwapRelease is a partially unsynchronized version
// of Cas that relaxes ordering constraints. Other threads
// may observe operations that occur after this operation to
// precede it, but no operation that precedes it
// on this thread can be observed to occur after it.
// It reports whether the swap ran.
//
// WARNING: Use sparingly and with great care.
//
//go:nosplit
func ( *Uint32) (, uint32) bool {
return CasRel(&.value, , )
}
// Swap replaces u's value with new, returning
// u's value before the replacement.
//
//go:nosplit
func ( *Uint32) ( uint32) uint32 {
return Xchg(&.value, )
}
// And takes value and performs a bit-wise
// "and" operation with the value of u, storing
// the result into u.
//
// The full process is performed atomically.
//
//go:nosplit
func ( *Uint32) ( uint32) {
And(&.value, )
}
// Or takes value and performs a bit-wise
// "or" operation with the value of u, storing
// the result into u.
//
// The full process is performed atomically.
//
//go:nosplit
func ( *Uint32) ( uint32) {
Or(&.value, )
}
// Add adds delta to u atomically, returning
// the new updated value.
//
// This operation wraps around in the usual
// two's-complement way.
//
//go:nosplit
func ( *Uint32) ( int32) uint32 {
return Xadd(&.value, )
}
// Uint64 is an atomically accessed uint64 value.
//
// 8-byte aligned on all platforms, unlike a regular uint64.
//
// A Uint64 must not be copied.
type Uint64 struct {
	noCopy noCopy  // trips go vet's copylocks check on accidental copies
	_      align64 // compiler-recognized marker forcing 8-byte alignment
	value  uint64
}
// Load accesses and returns the value atomically.
//
//go:nosplit
func ( *Uint64) () uint64 {
return Load64(&.value)
}
// Store updates the value atomically.
//
//go:nosplit
func ( *Uint64) ( uint64) {
Store64(&.value, )
}
// CompareAndSwap atomically compares u's value with old,
// and if they're equal, swaps u's value with new.
// It reports whether the swap ran.
//
//go:nosplit
func ( *Uint64) (, uint64) bool {
return Cas64(&.value, , )
}
// Swap replaces u's value with new, returning
// u's value before the replacement.
//
//go:nosplit
func ( *Uint64) ( uint64) uint64 {
return Xchg64(&.value, )
}
// Add adds delta to u atomically, returning
// the new updated value.
//
// This operation wraps around in the usual
// two's-complement way.
//
//go:nosplit
func ( *Uint64) ( int64) uint64 {
return Xadd64(&.value, )
}
// Uintptr is an atomically accessed uintptr value.
//
// A Uintptr must not be copied.
type Uintptr struct {
	noCopy noCopy // trips go vet's copylocks check on accidental copies
	value  uintptr
}
// Load accesses and returns the value atomically.
//
//go:nosplit
func ( *Uintptr) () uintptr {
return Loaduintptr(&.value)
}
// LoadAcquire is a partially unsynchronized version
// of Load that relaxes ordering constraints. Other threads
// may observe operations that precede this operation to
// occur after it, but no operation that occurs after it
// on this thread can be observed to occur before it.
//
// WARNING: Use sparingly and with great care.
//
//go:nosplit
func ( *Uintptr) () uintptr {
return LoadAcquintptr(&.value)
}
// Store updates the value atomically.
//
//go:nosplit
func ( *Uintptr) ( uintptr) {
Storeuintptr(&.value, )
}
// StoreRelease is a partially unsynchronized version
// of Store that relaxes ordering constraints. Other threads
// may observe operations that occur after this operation to
// precede it, but no operation that precedes it
// on this thread can be observed to occur after it.
//
// WARNING: Use sparingly and with great care.
//
//go:nosplit
func ( *Uintptr) ( uintptr) {
StoreReluintptr(&.value, )
}
// CompareAndSwap atomically compares u's value with old,
// and if they're equal, swaps u's value with new.
// It reports whether the swap ran.
//
//go:nosplit
func ( *Uintptr) (, uintptr) bool {
return Casuintptr(&.value, , )
}
// Swap replaces u's value with new, returning
// u's value before the replacement.
//
//go:nosplit
func ( *Uintptr) ( uintptr) uintptr {
return Xchguintptr(&.value, )
}
// Add adds delta to u atomically, returning
// the new updated value.
//
// This operation wraps around in the usual
// two's-complement way.
//
//go:nosplit
func ( *Uintptr) ( uintptr) uintptr {
return Xadduintptr(&.value, )
}
// Float64 is an atomically accessed float64 value.
//
// 8-byte aligned on all platforms, unlike a regular float64.
//
// A Float64 must not be copied.
type Float64 struct {
	// Inherits noCopy and align64 from Uint64.
	u Uint64
}
// Load accesses and returns the value atomically.
//
//go:nosplit
func ( *Float64) () float64 {
:= .u.Load()
return *(*float64)(unsafe.Pointer(&))
}
// Store updates the value atomically.
//
//go:nosplit
func ( *Float64) ( float64) {
.u.Store(*(*uint64)(unsafe.Pointer(&)))
}
// UnsafePointer is an atomically accessed unsafe.Pointer value.
//
// Note that because of the atomicity guarantees, stores to values
// of this type never trigger a write barrier, and the relevant
// methods are suffixed with "NoWB" to indicate that explicitly.
// As a result, this type should be used carefully, and sparingly,
// mostly with values that do not live in the Go heap anyway.
//
// An UnsafePointer must not be copied.
type UnsafePointer struct {
	noCopy noCopy // trips go vet's copylocks check on accidental copies
	value  unsafe.Pointer
}
// Load accesses and returns the value atomically.
//
//go:nosplit
func ( *UnsafePointer) () unsafe.Pointer {
return Loadp(unsafe.Pointer(&.value))
}
// StoreNoWB updates the value atomically.
//
// WARNING: As the name implies this operation does *not*
// perform a write barrier on value, and so this operation may
// hide pointers from the GC. Use with care and sparingly.
// It is safe to use with values not found in the Go heap.
// Prefer Store instead.
//
//go:nosplit
func ( *UnsafePointer) ( unsafe.Pointer) {
StorepNoWB(unsafe.Pointer(&.value), )
}
// Store updates the value atomically.
func ( *UnsafePointer) ( unsafe.Pointer) {
storePointer(&.value, )
}
// storePointer atomically stores new into *ptr with a write
// barrier; the body is provided by the runtime via linkname.
//
//go:linkname storePointer
func storePointer(ptr *unsafe.Pointer, new unsafe.Pointer)
// CompareAndSwapNoWB atomically (with respect to other methods)
// compares u's value with old, and if they're equal,
// swaps u's value with new.
// It reports whether the swap ran.
//
// WARNING: As the name implies this operation does *not*
// perform a write barrier on value, and so this operation may
// hide pointers from the GC. Use with care and sparingly.
// It is safe to use with values not found in the Go heap.
// Prefer CompareAndSwap instead.
//
//go:nosplit
func ( *UnsafePointer) (, unsafe.Pointer) bool {
return Casp1(&.value, , )
}
// CompareAndSwap atomically compares u's value with old,
// and if they're equal, swaps u's value with new.
// It reports whether the swap ran.
func ( *UnsafePointer) (, unsafe.Pointer) bool {
return casPointer(&.value, , )
}
// casPointer compares-and-swaps *ptr with a write barrier.
// No body here — presumably provided by the runtime, mirroring
// storePointer above (NOTE(review): confirm the linkname side).
func casPointer(ptr *unsafe.Pointer, old, new unsafe.Pointer) bool
// Pointer is an atomic pointer of type *T.
type Pointer[ any] struct {
u UnsafePointer
}
// Load accesses and returns the value atomically.
//
//go:nosplit
func ( *Pointer[]) () * {
return (*)(.u.Load())
}
// StoreNoWB updates the value atomically.
//
// WARNING: As the name implies this operation does *not*
// perform a write barrier on value, and so this operation may
// hide pointers from the GC. Use with care and sparingly.
// It is safe to use with values not found in the Go heap.
// Prefer Store instead.
//
//go:nosplit
func ( *Pointer[]) ( *) {
.u.StoreNoWB(unsafe.Pointer())
}
// Store updates the value atomically.
//
//go:nosplit
func ( *Pointer[]) ( *) {
.u.Store(unsafe.Pointer())
}
// CompareAndSwapNoWB atomically (with respect to other methods)
// compares u's value with old, and if they're equal,
// swaps u's value with new.
// It reports whether the swap ran.
//
// WARNING: As the name implies this operation does *not*
// perform a write barrier on value, and so this operation may
// hide pointers from the GC. Use with care and sparingly.
// It is safe to use with values not found in the Go heap.
// Prefer CompareAndSwap instead.
//
//go:nosplit
func ( *Pointer[]) (, *) bool {
return .u.CompareAndSwapNoWB(unsafe.Pointer(), unsafe.Pointer())
}
// CompareAndSwap atomically (with respect to other methods)
// compares u's value with old, and if they're equal,
// swaps u's value with new.
// It reports whether the swap ran.
func ( *Pointer[]) (, *) bool {
return .u.CompareAndSwap(unsafe.Pointer(), unsafe.Pointer())
}
// noCopy may be embedded into structs which must not be copied
// after the first use.
//
// See https://golang.org/issues/8005#issuecomment-190753527
// for details.
type noCopy struct{}

// Lock is a no-op used by -copylocks checker from `go vet`.
func (*noCopy) Lock() {}

// Unlock is the matching no-op; together with Lock it makes
// noCopy satisfy sync.Locker, which is what -copylocks keys on.
func (*noCopy) Unlock() {}
// align64 may be added to structs that must be 64-bit aligned.
// This struct is recognized by a special case in the compiler
// and will not work if copied to any other package.
type align64 struct{}
The pages are generated with Golds v0.6.9-preview. (GOOS=linux GOARCH=amd64) Golds is a Go 101 project developed by Tapir Liu. PR and bug reports are welcome and can be submitted to the issue list. Please follow @Go100and1 (reachable from the left QR code) to get the latest news of Golds. |