// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

//go:build goexperiment.swissmap

package maps

import (
	"internal/abi"
	"internal/asan"
	"internal/msan"
	"internal/race"
	"internal/runtime/sys"
	"unsafe"
)

// Functions below pushed from runtime.

// mapKeyError returns the error to panic with when key p (of map type typ)
// is unhashable (e.g. contains a NaN-keyed interface or unhashable dynamic
// type). Implemented in runtime; pushed here via linkname.
//
//go:linkname mapKeyError
func mapKeyError(typ *abi.SwissMapType, p unsafe.Pointer) error

// Pushed from runtime in order to use runtime.plainError
//
//go:linkname errNilAssign
var errNilAssign error

// Pull from runtime. It is important that is this the exact same copy as the
// runtime because runtime.mapaccess1_fat compares the returned pointer with
// &runtime.zeroVal[0].
// TODO: move zeroVal to internal/abi?
//
//go:linkname zeroVal runtime.zeroVal
var zeroVal [abi.ZeroValSize]byte

// mapaccess1 returns a pointer to h[key].  Never returns nil, instead
// it will return a reference to the zero object for the elem type if
// the key is not in the map.
// NOTE: The returned pointer may keep the whole map live, so don't
// hold onto it for very long.
//
//go:linkname runtime_mapaccess1 runtime.mapaccess1
func runtime_mapaccess1( *abi.SwissMapType,  *Map,  unsafe.Pointer) unsafe.Pointer {
	if race.Enabled &&  != nil {
		 := sys.GetCallerPC()
		 := abi.FuncPCABIInternal()
		race.ReadPC(unsafe.Pointer(), , )
		race.ReadObjectPC(.Key, , , )
	}
	if msan.Enabled &&  != nil {
		msan.Read(, .Key.Size_)
	}
	if asan.Enabled &&  != nil {
		asan.Read(, .Key.Size_)
	}

	if  == nil || .Used() == 0 {
		if  := mapKeyError(, );  != nil {
			panic() // see issue 23734
		}
		return unsafe.Pointer(&zeroVal[0])
	}

	if .writing != 0 {
		fatal("concurrent map read and map write")
	}

	 := .Hasher(, .seed)

	if .dirLen <= 0 {
		, ,  := .getWithKeySmall(, , )
		if ! {
			return unsafe.Pointer(&zeroVal[0])
		}
		return 
	}

	// Select table.
	 := .directoryIndex()
	 := .directoryAt()

	// Probe table.
	 := makeProbeSeq(h1(), .groups.lengthMask)
	for ; ;  = .next() {
		 := .groups.group(, .offset)

		 := .ctrls().matchH2(h2())

		for  != 0 {
			 := .first()

			 := .key(, )
			 := 
			if .IndirectKey() {
				 = *((*unsafe.Pointer)())
			}
			if .Key.Equal(, ) {
				 := unsafe.Pointer(uintptr() + .ElemOff)
				if .IndirectElem() {
					 = *((*unsafe.Pointer)())
				}
				return 
			}
			 = .removeFirst()
		}

		 = .ctrls().matchEmpty()
		if  != 0 {
			// Finding an empty slot means we've reached the end of
			// the probe sequence.
			return unsafe.Pointer(&zeroVal[0])
		}
	}
}

//go:linkname runtime_mapaccess2 runtime.mapaccess2
func runtime_mapaccess2( *abi.SwissMapType,  *Map,  unsafe.Pointer) (unsafe.Pointer, bool) {
	if race.Enabled &&  != nil {
		 := sys.GetCallerPC()
		 := abi.FuncPCABIInternal(runtime_mapaccess1)
		race.ReadPC(unsafe.Pointer(), , )
		race.ReadObjectPC(.Key, , , )
	}
	if msan.Enabled &&  != nil {
		msan.Read(, .Key.Size_)
	}
	if asan.Enabled &&  != nil {
		asan.Read(, .Key.Size_)
	}

	if  == nil || .Used() == 0 {
		if  := mapKeyError(, );  != nil {
			panic() // see issue 23734
		}
		return unsafe.Pointer(&zeroVal[0]), false
	}

	if .writing != 0 {
		fatal("concurrent map read and map write")
	}

	 := .Hasher(, .seed)

	if .dirLen == 0 {
		, ,  := .getWithKeySmall(, , )
		if ! {
			return unsafe.Pointer(&zeroVal[0]), false
		}
		return , true
	}

	// Select table.
	 := .directoryIndex()
	 := .directoryAt()

	// Probe table.
	 := makeProbeSeq(h1(), .groups.lengthMask)
	for ; ;  = .next() {
		 := .groups.group(, .offset)

		 := .ctrls().matchH2(h2())

		for  != 0 {
			 := .first()

			 := .key(, )
			 := 
			if .IndirectKey() {
				 = *((*unsafe.Pointer)())
			}
			if .Key.Equal(, ) {
				 := unsafe.Pointer(uintptr() + .ElemOff)
				if .IndirectElem() {
					 = *((*unsafe.Pointer)())
				}
				return , true
			}
			 = .removeFirst()
		}

		 = .ctrls().matchEmpty()
		if  != 0 {
			// Finding an empty slot means we've reached the end of
			// the probe sequence.
			return unsafe.Pointer(&zeroVal[0]), false
		}
	}
}

//go:linkname runtime_mapassign runtime.mapassign
func runtime_mapassign( *abi.SwissMapType,  *Map,  unsafe.Pointer) unsafe.Pointer {
	if  == nil {
		panic(errNilAssign)
	}
	if race.Enabled {
		 := sys.GetCallerPC()
		 := abi.FuncPCABIInternal()
		race.WritePC(unsafe.Pointer(), , )
		race.ReadObjectPC(.Key, , , )
	}
	if msan.Enabled {
		msan.Read(, .Key.Size_)
	}
	if asan.Enabled {
		asan.Read(, .Key.Size_)
	}
	if .writing != 0 {
		fatal("concurrent map writes")
	}

	 := .Hasher(, .seed)

	// Set writing after calling Hasher, since Hasher may panic, in which
	// case we have not actually done a write.
	.writing ^= 1 // toggle, see comment on writing

	if .dirPtr == nil {
		.growToSmall()
	}

	if .dirLen == 0 {
		if .used < abi.SwissMapGroupSlots {
			 := .putSlotSmall(, , )

			if .writing == 0 {
				fatal("concurrent map writes")
			}
			.writing ^= 1

			return 
		}

		// Can't fit another entry, grow to full size map.
		.growToTable()
	}

	var  unsafe.Pointer
:
	for {
		// Select table.
		 := .directoryIndex()
		 := .directoryAt()

		 := makeProbeSeq(h1(), .groups.lengthMask)

		// As we look for a match, keep track of the first deleted slot
		// we find, which we'll use to insert the new entry if
		// necessary.
		var  groupReference
		var  uintptr

		for ; ;  = .next() {
			 := .groups.group(, .offset)
			 := .ctrls().matchH2(h2())

			// Look for an existing slot containing this key.
			for  != 0 {
				 := .first()

				 := .key(, )
				 := 
				if .IndirectKey() {
					 = *((*unsafe.Pointer)())
				}
				if .Key.Equal(, ) {
					if .NeedKeyUpdate() {
						typedmemmove(.Key, , )
					}

					 = unsafe.Pointer(uintptr() + .ElemOff)
					if .IndirectElem() {
						 = *((*unsafe.Pointer)())
					}

					.checkInvariants(, )
					break 
				}
				 = .removeFirst()
			}

			// No existing slot for this key in this group. Is this the end
			// of the probe sequence?
			 = .ctrls().matchEmpty()
			if  != 0 {
				// Finding an empty slot means we've reached the end of
				// the probe sequence.

				var  uintptr

				// If we found a deleted slot along the way, we
				// can replace it without consuming growthLeft.
				if .data != nil {
					 = 
					 = 
					.growthLeft++ // will be decremented below to become a no-op.
				} else {
					// Otherwise, use the empty slot.
					 = .first()
				}

				// If there is room left to grow, just insert the new entry.
				if .growthLeft > 0 {
					 := .key(, )
					 := 
					if .IndirectKey() {
						 := newobject(.Key)
						*(*unsafe.Pointer)() = 
						 = 
					}
					typedmemmove(.Key, , )

					 = unsafe.Pointer(uintptr() + .ElemOff)
					if .IndirectElem() {
						 := newobject(.Elem)
						*(*unsafe.Pointer)() = 
						 = 
					}

					.ctrls().set(, ctrl(h2()))
					.growthLeft--
					.used++
					.used++

					.checkInvariants(, )
					break 
				}

				.rehash(, )
				continue 
			}

			// No empty slots in this group. Check for a deleted
			// slot, which we'll use if we don't find a match later
			// in the probe sequence.
			//
			// We only need to remember a single deleted slot.
			if .data == nil {
				// Since we already checked for empty slots
				// above, matches here must be deleted slots.
				 = .ctrls().matchEmptyOrDeleted()
				if  != 0 {
					 = 
					 = .first()
				}
			}
		}
	}

	if .writing == 0 {
		fatal("concurrent map writes")
	}
	.writing ^= 1

	return 
}