// Copyright 2024 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

//go:build goexperiment.swissmap

package maps

import (
	"internal/abi"
	"internal/race"
	"internal/runtime/sys"
	"unsafe"
)
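
// This file contains the uint32-key fast paths for the swissmap
// implementation. The compiler rewrites operations on a map[uint32]V into
// calls to the runtime entry points bound by the //go:linkname directives
// below. Roughly, as a sketch of the lowering rather than the literal
// generated code:
//
//	v := m[k]      // runtime.mapaccess1_fast32(typ, m, k)
//	v, ok := m[k]  // runtime.mapaccess2_fast32(typ, m, k)
//	m[k] = v       // *(*V)(runtime.mapassign_fast32(typ, m, k)) = v
//	delete(m, k)   // runtime.mapdelete_fast32(typ, m, k)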

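// runtime_mapaccess1_fast32 is the uint32-key specialization of a
// single-result map read: it returns a pointer to the element for key, or a
// pointer to the shared zero value if the key is not present. It never
// returns nil.
//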
//go:linkname runtime_mapaccess1_fast32 runtime.mapaccess1_fast32
func runtime_mapaccess1_fast32(typ *abi.SwissMapType, m *Map, key uint32) unsafe.Pointer {
	if race.Enabled && m != nil {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapaccess1)
		race.ReadPC(unsafe.Pointer(m), callerpc, pc)
	}

	if m == nil || m.Used() == 0 {
		return unsafe.Pointer(&zeroVal[0])
	}

	if m.writing != 0 {
		fatal("concurrent map read and map write")
		return nil
	}

	if m.dirLen == 0 {
		g := groupReference{
			data: m.dirPtr,
		}
		full := g.ctrls().matchFull()
		slotKey := g.key(typ, 0)
		slotSize := typ.SlotSize
		for full != 0 {
			if key == *(*uint32)(slotKey) && full.lowestSet() {
				slotElem := unsafe.Pointer(uintptr(slotKey) + typ.ElemOff)
				return slotElem
			}
			slotKey = unsafe.Pointer(uintptr(slotKey) + slotSize)
			full = full.shiftOutLowest()
		}
		return unsafe.Pointer(&zeroVal[0])
	}

	k := key
	hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed)

	// Select table.
	idx := m.directoryIndex(hash)
	t := m.directoryAt(idx)

	// Probe table.
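	// The hash is split in two: h1 selects the group where the probe
	// sequence starts, and h2 (the low 7 bits) is matched against the
	// per-slot control bytes, filtering a whole group of slots at once
	// before any full key comparison is done.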
	seq := makeProbeSeq(h1(hash), t.groups.lengthMask)
	for ; ; seq = seq.next() {
		g := t.groups.group(typ, seq.offset)

		match := g.ctrls().matchH2(h2(hash))

		for match != 0 {
			i := match.first()

			slotKey := g.key(typ, i)
			if key == *(*uint32)(slotKey) {
				slotElem := unsafe.Pointer(uintptr(slotKey) + typ.ElemOff)
				return slotElem
			}
			match = match.removeFirst()
		}

		match = g.ctrls().matchEmpty()
		if match != 0 {
			// Finding an empty slot means we've reached the end of
			// the probe sequence.
			return unsafe.Pointer(&zeroVal[0])
		}
	}
}

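// runtime_mapaccess2_fast32 is the uint32-key specialization of the
// two-result ("comma ok") map read. The second result reports whether the
// key was present.
//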
//go:linkname runtime_mapaccess2_fast32 runtime.mapaccess2_fast32
func runtime_mapaccess2_fast32(typ *abi.SwissMapType, m *Map, key uint32) (unsafe.Pointer, bool) {
	if race.Enabled && m != nil {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapaccess2)
		race.ReadPC(unsafe.Pointer(m), callerpc, pc)
	}

	if m == nil || m.Used() == 0 {
		return unsafe.Pointer(&zeroVal[0]), false
	}

	if m.writing != 0 {
		fatal("concurrent map read and map write")
		return nil, false
	}

	if m.dirLen == 0 {
		g := groupReference{
			data: m.dirPtr,
		}
		full := g.ctrls().matchFull()
		slotKey := g.key(typ, 0)
		slotSize := typ.SlotSize
		for full != 0 {
			if key == *(*uint32)(slotKey) && full.lowestSet() {
				slotElem := unsafe.Pointer(uintptr(slotKey) + typ.ElemOff)
				return slotElem, true
			}
			slotKey = unsafe.Pointer(uintptr(slotKey) + slotSize)
			full = full.shiftOutLowest()
		}
		return unsafe.Pointer(&zeroVal[0]), false
	}

	k := key
	hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed)

	// Select table.
	idx := m.directoryIndex(hash)
	t := m.directoryAt(idx)

	// Probe table.
	seq := makeProbeSeq(h1(hash), t.groups.lengthMask)
	for ; ; seq = seq.next() {
		g := t.groups.group(typ, seq.offset)

		match := g.ctrls().matchH2(h2(hash))

		for match != 0 {
			i := match.first()

			slotKey := g.key(typ, i)
			if key == *(*uint32)(slotKey) {
				slotElem := unsafe.Pointer(uintptr(slotKey) + typ.ElemOff)
				return slotElem, true
			}
			match = match.removeFirst()
		}

		match = g.ctrls().matchEmpty()
		if match != 0 {
			// Finding an empty slot means we've reached the end of
			// the probe sequence.
			return unsafe.Pointer(&zeroVal[0]), false
		}
	}
}

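// putSlotSmallFast32 returns the element slot for key in a small map (a
// single group with no directory), inserting key if it is not already
// present. The caller must have checked that the group has room for another
// entry.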
func (m *Map) putSlotSmallFast32(typ *abi.SwissMapType, hash uintptr, key uint32) unsafe.Pointer {
	g := groupReference{
		data: m.dirPtr,
	}

	match := g.ctrls().matchH2(h2(hash))

	// Look for an existing slot containing this key.
	for match != 0 {
		i := match.first()

		slotKey := g.key(typ, i)
		if key == *(*uint32)(slotKey) {
			slotElem := g.elem(typ, i)
			return slotElem
		}
		match = match.removeFirst()
	}

	// There can't be deleted slots: small maps can't have them
	// (see deleteSmall). Use matchEmptyOrDeleted as it is a bit
	// more efficient than matchEmpty.
	match = g.ctrls().matchEmptyOrDeleted()
	if match == 0 {
		fatal("small map with no empty slot (concurrent map writes?)")
	}

	i := match.first()

	slotKey := g.key(typ, i)
	*(*uint32)(slotKey) = key

	slotElem := g.elem(typ, i)

	g.ctrls().set(i, ctrl(h2(hash)))
	m.used++

	return slotElem
}

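// runtime_mapassign_fast32 is the uint32-key specialization of map assign.
// It returns a pointer to the element slot for key, growing the map if
// necessary; the caller's generated code stores the value through the
// returned pointer.
//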
//go:linkname runtime_mapassign_fast32 runtime.mapassign_fast32
func runtime_mapassign_fast32(typ *abi.SwissMapType, m *Map, key uint32) unsafe.Pointer {
	if m == nil {
		panic(errNilAssign)
	}
	if race.Enabled {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapassign)
		race.WritePC(unsafe.Pointer(m), callerpc, pc)
	}
	if m.writing != 0 {
		fatal("concurrent map writes")
	}

	k := key
	hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed)

	// Set writing after calling Hasher, since Hasher may panic, in which
	// case we have not actually done a write.
	m.writing ^= 1 // toggle, see comment on writing

	if m.dirPtr == nil {
		m.growToSmall(typ)
	}

	if m.dirLen == 0 {
		if m.used < abi.SwissMapGroupSlots {
			elem := m.putSlotSmallFast32(typ, hash, key)

			if m.writing == 0 {
				fatal("concurrent map writes")
			}
			m.writing ^= 1

			return elem
		}

		// Can't fit another entry; grow to a full-size map.
		m.growToTable(typ)
	}

	var slotElem unsafe.Pointer
outer:
	for {
		// Select table.
		idx := m.directoryIndex(hash)
		t := m.directoryAt(idx)

		seq := makeProbeSeq(h1(hash), t.groups.lengthMask)

		// As we look for a match, keep track of the first deleted slot
		// we find, which we'll use to insert the new entry if
		// necessary.
		var firstDeletedGroup groupReference
		var firstDeletedSlot uintptr

		for ; ; seq = seq.next() {
			g := t.groups.group(typ, seq.offset)
			match := g.ctrls().matchH2(h2(hash))

			// Look for an existing slot containing this key.
			for match != 0 {
				i := match.first()

				slotKey := g.key(typ, i)
				if key == *(*uint32)(slotKey) {
					slotElem = g.elem(typ, i)

					t.checkInvariants(typ, m)
					break outer
				}
				match = match.removeFirst()
			}

			// No existing slot for this key in this group. Is this the end
			// of the probe sequence?
			match = g.ctrls().matchEmptyOrDeleted()
			if match == 0 {
				continue // nothing but filled slots. Keep probing.
			}
			i := match.first()
			if g.ctrls().get(i) == ctrlDeleted {
				// There are some deleted slots. Remember
				// the first one, and keep probing.
				if firstDeletedGroup.data == nil {
					firstDeletedGroup = g
					firstDeletedSlot = i
				}
				continue
			}
			// We've found an empty slot, which means we've reached the end of
			// the probe sequence.

			// If we found a deleted slot along the way, we can
			// replace it without consuming growthLeft.
			if firstDeletedGroup.data != nil {
				g = firstDeletedGroup
				i = firstDeletedSlot
				t.growthLeft++ // will be decremented below to become a no-op.
			}

			// If there is room left to grow, just insert the new entry.
			if t.growthLeft > 0 {
				slotKey := g.key(typ, i)
				*(*uint32)(slotKey) = key

				slotElem = g.elem(typ, i)

				g.ctrls().set(i, ctrl(h2(hash)))
				t.growthLeft--
				t.used++
				m.used++

				t.checkInvariants(typ, m)
				break outer
			}

			t.rehash(typ, m)
			continue outer
		}
	}

	if m.writing == 0 {
		fatal("concurrent map writes")
	}
	m.writing ^= 1

	return slotElem
}

// Key is a 32-bit pointer (only called on 32-bit GOARCH). This source is identical to fast64ptr.
//
// TODO(prattmic): With some compiler refactoring we could avoid duplication of this function.
//
//go:linkname runtime_mapassign_fast32ptr runtime.mapassign_fast32ptr
func runtime_mapassign_fast32ptr(typ *abi.SwissMapType, m *Map, key unsafe.Pointer) unsafe.Pointer {
	if m == nil {
		panic(errNilAssign)
	}
	if race.Enabled {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapassign)
		race.WritePC(unsafe.Pointer(m), callerpc, pc)
	}
	if m.writing != 0 {
		fatal("concurrent map writes")
	}

	k := key
	hash := typ.Hasher(abi.NoEscape(unsafe.Pointer(&k)), m.seed)

	// Set writing after calling Hasher, since Hasher may panic, in which
	// case we have not actually done a write.
	m.writing ^= 1 // toggle, see comment on writing

	if m.dirPtr == nil {
		m.growToSmall(typ)
	}

	if m.dirLen == 0 {
		if m.used < abi.SwissMapGroupSlots {
			elem := m.putSlotSmallFastPtr(typ, hash, key)

			if m.writing == 0 {
				fatal("concurrent map writes")
			}
			m.writing ^= 1

			return elem
		}

		// Can't fit another entry; grow to a full-size map.
		m.growToTable(typ)
	}

	var slotElem unsafe.Pointer
outer:
	for {
		// Select table.
		idx := m.directoryIndex(hash)
		t := m.directoryAt(idx)

		seq := makeProbeSeq(h1(hash), t.groups.lengthMask)

		// As we look for a match, keep track of the first deleted slot we
		// find, which we'll use to insert the new entry if necessary.
		var firstDeletedGroup groupReference
		var firstDeletedSlot uintptr

		for ; ; seq = seq.next() {
			g := t.groups.group(typ, seq.offset)
			match := g.ctrls().matchH2(h2(hash))

			// Look for an existing slot containing this key.
			for match != 0 {
				i := match.first()

				slotKey := g.key(typ, i)
				if key == *(*unsafe.Pointer)(slotKey) {
					slotElem = g.elem(typ, i)

					t.checkInvariants(typ, m)
					break outer
				}
				match = match.removeFirst()
			}

			// No existing slot for this key in this group. Is this the end
			// of the probe sequence?
			match = g.ctrls().matchEmptyOrDeleted()
			if match == 0 {
				continue // nothing but filled slots. Keep probing.
			}
			i := match.first()
			if g.ctrls().get(i) == ctrlDeleted {
				// There are some deleted slots. Remember
				// the first one, and keep probing.
				if firstDeletedGroup.data == nil {
					firstDeletedGroup = g
					firstDeletedSlot = i
				}
				continue
			}
			// We've found an empty slot, which means we've reached the end of
			// the probe sequence.

			// If we found a deleted slot along the way, we can
			// replace it without consuming growthLeft.
			if firstDeletedGroup.data != nil {
				g = firstDeletedGroup
				i = firstDeletedSlot
				t.growthLeft++ // will be decremented below to become a no-op.
			}

			// If there is room left to grow, just insert the new entry.
			if t.growthLeft > 0 {
				slotKey := g.key(typ, i)
				*(*unsafe.Pointer)(slotKey) = key

				slotElem = g.elem(typ, i)

				g.ctrls().set(i, ctrl(h2(hash)))
				t.growthLeft--
				t.used++
				m.used++

				t.checkInvariants(typ, m)
				break outer
			}

			t.rehash(typ, m)
			continue outer
		}
	}

	if m.writing == 0 {
		fatal("concurrent map writes")
	}
	m.writing ^= 1

	return slotElem
}

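// runtime_mapdelete_fast32 is the uint32-key specialization of map delete.
// Deleting from a nil or empty map is a no-op.
//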
//go:linkname runtime_mapdelete_fast32 runtime.mapdelete_fast32
func runtime_mapdelete_fast32(typ *abi.SwissMapType, m *Map, key uint32) {
	if race.Enabled {
		callerpc := sys.GetCallerPC()
		pc := abi.FuncPCABIInternal(runtime_mapassign)
		race.WritePC(unsafe.Pointer(m), callerpc, pc)
	}

	if m == nil || m.Used() == 0 {
		return
	}

	m.Delete(typ, abi.NoEscape(unsafe.Pointer(&key)))
}