author    | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-28 13:14:23 +0000
committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-28 13:14:23 +0000
commit    | 73df946d56c74384511a194dd01dbe099584fd1a (patch)
tree      | fd0bcea490dd81327ddfbb31e215439672c9a068 /src/runtime/atomic_pointer.go
parent    | Initial commit. (diff)
Adding upstream version 1.16.10.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'src/runtime/atomic_pointer.go')
-rw-r--r-- | src/runtime/atomic_pointer.go | 77
1 file changed, 77 insertions(+), 0 deletions(-)
diff --git a/src/runtime/atomic_pointer.go b/src/runtime/atomic_pointer.go
new file mode 100644
index 0000000..b8f0c22
--- /dev/null
+++ b/src/runtime/atomic_pointer.go
@@ -0,0 +1,77 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package runtime
+
+import (
+	"runtime/internal/atomic"
+	"unsafe"
+)
+
+// These functions cannot have go:noescape annotations,
+// because while ptr does not escape, new does.
+// If new is marked as not escaping, the compiler will make incorrect
+// escape analysis decisions about the pointer value being stored.
+
+// atomicwb performs a write barrier before an atomic pointer write.
+// The caller should guard the call with "if writeBarrier.enabled".
+//
+//go:nosplit
+func atomicwb(ptr *unsafe.Pointer, new unsafe.Pointer) {
+	slot := (*uintptr)(unsafe.Pointer(ptr))
+	if !getg().m.p.ptr().wbBuf.putFast(*slot, uintptr(new)) {
+		wbBufFlush(slot, uintptr(new))
+	}
+}
+
+// atomicstorep performs *ptr = new atomically and invokes a write barrier.
+//
+//go:nosplit
+func atomicstorep(ptr unsafe.Pointer, new unsafe.Pointer) {
+	if writeBarrier.enabled {
+		atomicwb((*unsafe.Pointer)(ptr), new)
+	}
+	atomic.StorepNoWB(noescape(ptr), new)
+}
+
+// Like above, but implement in terms of sync/atomic's uintptr operations.
+// We cannot just call the runtime routines, because the race detector expects
+// to be able to intercept the sync/atomic forms but not the runtime forms.
+
+//go:linkname sync_atomic_StoreUintptr sync/atomic.StoreUintptr
+func sync_atomic_StoreUintptr(ptr *uintptr, new uintptr)
+
+//go:linkname sync_atomic_StorePointer sync/atomic.StorePointer
+//go:nosplit
+func sync_atomic_StorePointer(ptr *unsafe.Pointer, new unsafe.Pointer) {
+	if writeBarrier.enabled {
+		atomicwb(ptr, new)
+	}
+	sync_atomic_StoreUintptr((*uintptr)(unsafe.Pointer(ptr)), uintptr(new))
+}
+
+//go:linkname sync_atomic_SwapUintptr sync/atomic.SwapUintptr
+func sync_atomic_SwapUintptr(ptr *uintptr, new uintptr) uintptr
+
+//go:linkname sync_atomic_SwapPointer sync/atomic.SwapPointer
+//go:nosplit
+func sync_atomic_SwapPointer(ptr *unsafe.Pointer, new unsafe.Pointer) unsafe.Pointer {
+	if writeBarrier.enabled {
+		atomicwb(ptr, new)
+	}
+	old := unsafe.Pointer(sync_atomic_SwapUintptr((*uintptr)(noescape(unsafe.Pointer(ptr))), uintptr(new)))
+	return old
+}
+
+//go:linkname sync_atomic_CompareAndSwapUintptr sync/atomic.CompareAndSwapUintptr
+func sync_atomic_CompareAndSwapUintptr(ptr *uintptr, old, new uintptr) bool
+
+//go:linkname sync_atomic_CompareAndSwapPointer sync/atomic.CompareAndSwapPointer
+//go:nosplit
+func sync_atomic_CompareAndSwapPointer(ptr *unsafe.Pointer, old, new unsafe.Pointer) bool {
+	if writeBarrier.enabled {
+		atomicwb(ptr, new)
+	}
+	return sync_atomic_CompareAndSwapUintptr((*uintptr)(noescape(unsafe.Pointer(ptr))), uintptr(old), uintptr(new))
+}
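
For context (not part of this commit): the functions above are the runtime-side hooks, reached via go:linkname, that back the exported sync/atomic pointer operations, so that the write barrier runs before each atomic pointer write while the race detector can still intercept the sync/atomic entry points. The following is a minimal user-level sketch of those exported calls; the config type and names are hypothetical and chosen only for illustration.

package main

import (
	"fmt"
	"sync/atomic"
	"unsafe"
)

type config struct {
	name string
}

func main() {
	// A pointer slot published and read atomically; it holds a *config.
	var ptr unsafe.Pointer

	// Publish an initial value. Under the hood this reaches
	// sync_atomic_StorePointer, which applies the write barrier
	// when the GC requires it.
	atomic.StorePointer(&ptr, unsafe.Pointer(&config{name: "v1"}))

	// Readers load the pointer without locking.
	cur := (*config)(atomic.LoadPointer(&ptr))
	fmt.Println(cur.name)

	// Replace the value only if it is still the one we observed,
	// mirroring sync_atomic_CompareAndSwapPointer above.
	next := &config{name: "v2"}
	if atomic.CompareAndSwapPointer(&ptr, unsafe.Pointer(cur), unsafe.Pointer(next)) {
		fmt.Println("swapped to", (*config)(atomic.LoadPointer(&ptr)).name)
	}
}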