// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package reflect

import (
	"errors"
	"internal/abi"
	"internal/goarch"
	"internal/unsafeheader"
	"runtime"
	"unsafe"
)

// Value is the reflection interface to a Go value.
//
// Not all methods apply to all kinds of values. Restrictions,
// if any, are noted in the documentation for each method.
// Use the Kind method to find out the kind of value before
// calling kind-specific methods. Calling a method
// inappropriate to the kind of type causes a run time panic.
//
// The zero Value represents no value.
// Its [Value.IsValid] method returns false, its Kind method returns [Invalid],
// its String method returns "<invalid Value>", and all other methods panic.
// Most functions and methods never return an invalid value.
// If one does, its documentation states the conditions explicitly.
//
// A Value can be used concurrently by multiple goroutines provided that
// the underlying Go value can be used concurrently for the equivalent
// direct operations.
//
// To compare two Values, compare the results of the Interface method.
// Using == on two Values does not compare the underlying values
// they represent.
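//
// For example (a minimal sketch from a client package, with hypothetical
// local values):
//
//	a := reflect.ValueOf(3)
//	b := reflect.ValueOf(3)
//	fmt.Println(a.Interface() == b.Interface()) // true: compares the underlying ints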
type Value struct {
	// typ_ holds the type of the value represented by a Value.
	// Access using the typ method to avoid escape of v.
	typ_ *abi.Type

	// Pointer-valued data or, if flagIndir is set, pointer to data.
	// Valid when either flagIndir is set or typ.pointers() is true.
	ptr unsafe.Pointer

	// flag holds metadata about the value.
	//
	// The lowest five bits give the Kind of the value, mirroring typ.Kind().
	//
	// The next set of bits are flag bits:
	//	- flagStickyRO: obtained via unexported not embedded field, so read-only
	//	- flagEmbedRO: obtained via unexported embedded field, so read-only
	//	- flagIndir: ptr holds a pointer to the data
	//	- flagAddr: v.CanAddr is true (implies flagIndir and ptr is non-nil)
	//	- flagMethod: v is a method value.
	// If ifaceIndir(typ), code can assume that flagIndir is set.
	//
	// The remaining 22+ bits give a method number for method values.
	// If flag.kind() != Func, code can assume that flagMethod is unset.
	flag

	// A method value represents a curried method invocation
	// like r.Read for some receiver r. The typ+val+flag bits describe
	// the receiver r, but the flag's Kind bits say Func (methods are
	// functions), and the top bits of the flag give the method number
	// in r's type's method table.
}

type flag uintptr

const (
	flagKindWidth        = 5 // there are 27 kinds
	flagKindMask    flag = 1<<flagKindWidth - 1
	flagStickyRO    flag = 1 << 5
	flagEmbedRO     flag = 1 << 6
	flagIndir       flag = 1 << 7
	flagAddr        flag = 1 << 8
	flagMethod      flag = 1 << 9
	flagMethodShift      = 10
	flagRO          flag = flagStickyRO | flagEmbedRO
)

func (f flag) kind() Kind {
	return Kind(f & flagKindMask)
}

func (f flag) ro() flag {
	if f&flagRO != 0 {
		return flagStickyRO
	}
	return 0
}

func (v Value) typ() *abi.Type {
	// Types are either static (for compiler-created types) or
	// heap-allocated but always reachable (for reflection-created
	// types, held in the central map). So there is no need to
	// escape types. noescape here helps avoid unnecessary escape
	// of v.
	return (*abi.Type)(abi.NoEscape(unsafe.Pointer(v.typ_)))
}

// pointer returns the underlying pointer represented by v.
// v.Kind() must be Pointer, Map, Chan, Func, or UnsafePointer;
// if v.Kind() == Pointer, the base type must not be not-in-heap.
func (v Value) pointer() unsafe.Pointer {
	if v.typ().Size() != goarch.PtrSize || !v.typ().Pointers() {
		panic("can't call pointer on a non-pointer Value")
	}
	if v.flag&flagIndir != 0 {
		return *(*unsafe.Pointer)(v.ptr)
	}
	return v.ptr
}

// packEface converts v to the empty interface.
func packEface(v Value) any {
	t := v.typ()
	var i any
	e := (*abi.EmptyInterface)(unsafe.Pointer(&i))
	// First, fill in the data portion of the interface.
	switch {
	case t.IfaceIndir():
		if v.flag&flagIndir == 0 {
			panic("bad indir")
		}
		// Value is indirect, and so is the interface we're making.
		ptr := v.ptr
		if v.flag&flagAddr != 0 {
			c := unsafe_New(t)
			typedmemmove(t, c, ptr)
			ptr = c
		}
		e.Data = ptr
	case v.flag&flagIndir != 0:
		// Value is indirect, but interface is direct. We need
		// to load the data at v.ptr into the interface data word.
		e.Data = *(*unsafe.Pointer)(v.ptr)
	default:
		// Value is direct, and so is the interface.
		e.Data = v.ptr
	}
	// Now, fill in the type portion. We're very careful here not
	// to have any operation between the e.Data and e.Type assignments
	// that would let the garbage collector observe the partially-built
	// interface value.
	e.Type = t
	return i
}

// unpackEface converts the empty interface i to a Value.
func unpackEface(i any) Value {
	e := (*abi.EmptyInterface)(unsafe.Pointer(&i))
	// NOTE: don't read e.Data until we know whether it is really a pointer or not.
	t := e.Type
	if t == nil {
		return Value{}
	}
	f := flag(t.Kind())
	if t.IfaceIndir() {
		f |= flagIndir
	}
	return Value{t, e.Data, f}
}

// A ValueError occurs when a Value method is invoked on
// a [Value] that does not support it. Such cases are documented
// in the description of each method.
type ValueError struct {
	Method string
	Kind   Kind
}

func (e *ValueError) Error() string {
	if e.Kind == 0 {
		return "reflect: call of " + e.Method + " on zero Value"
	}
	return "reflect: call of " + e.Method + " on " + e.Kind.String() + " Value"
}

// valueMethodName returns the name of the exported calling method on Value.
func valueMethodName() string {
	var pc [5]uintptr
	n := runtime.Callers(1, pc[:])
	frames := runtime.CallersFrames(pc[:n])
	var frame runtime.Frame
	for more := true; more; {
		const prefix = "reflect.Value."
		frame, more = frames.Next()
		name := frame.Function
		if len(name) > len(prefix) && name[:len(prefix)] == prefix {
			methodName := name[len(prefix):]
			if len(methodName) > 0 && 'A' <= methodName[0] && methodName[0] <= 'Z' {
				return methodName
			}
		}
	}
	return "unknown method"
}

// nonEmptyInterface is the header for an interface value with methods.
type nonEmptyInterface struct {
	itab *abi.ITab
	word unsafe.Pointer
}

// mustBe panics if f's kind is not expected.
// Making this a method on flag instead of on Value
// (and embedding flag in Value) means that we can write
// the very clear v.mustBe(Bool) and have it compile into
// v.flag.mustBe(Bool), which will only bother to copy the
// single important word for the receiver.
func (f flag) mustBe(expected Kind) {
	// TODO(mvdan): use f.kind() again once mid-stack inlining gets better
	if Kind(f&flagKindMask) != expected {
		panic(&ValueError{valueMethodName(), f.kind()})
	}
}
}

// mustBeExported panics if f records that the value was obtained using
// an unexported field.
func (f flag) mustBeExported() {
	if f == 0 || f&flagRO != 0 {
		f.mustBeExportedSlow()
	}
}

func (f flag) mustBeExportedSlow() {
	if f == 0 {
		panic(&ValueError{valueMethodName(), Invalid})
	}
	if f&flagRO != 0 {
		panic("reflect: " + valueMethodName() + " using value obtained using unexported field")
	}
}

// mustBeAssignable panics if f records that the value is not assignable,
// which is to say that either it was obtained using an unexported field
// or it is not addressable.
func (f flag) mustBeAssignable() {
	if f&flagRO != 0 || f&flagAddr == 0 {
		f.mustBeAssignableSlow()
	}
}

func (f flag) mustBeAssignableSlow() {
	if f == 0 {
		panic(&ValueError{valueMethodName(), Invalid})
	}
	// Assignable if addressable and not read-only.
	if f&flagRO != 0 {
		panic("reflect: " + valueMethodName() + " using value obtained using unexported field")
	}
	if f&flagAddr == 0 {
		panic("reflect: " + valueMethodName() + " using unaddressable value")
	}
}

// Addr returns a pointer value representing the address of v.
// It panics if [Value.CanAddr] returns false.
// Addr is typically used to obtain a pointer to a struct field
// or slice element in order to call a method that requires a
// pointer receiver.
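//
// A minimal sketch (from a client package, assuming a hypothetical type T
// with a pointer-receiver method Inc):
//
//	var x T
//	v := reflect.ValueOf(&x).Elem()        // addressable Value for x
//	v.Addr().MethodByName("Inc").Call(nil) // calls (&x).Inc()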
func (v Value) Addr() Value {
	if v.flag&flagAddr == 0 {
		panic("reflect.Value.Addr of unaddressable value")
	}
	// Preserve flagRO instead of using v.flag.ro() so that
	// v.Addr().Elem() is equivalent to v (#32772)
	fl := v.flag & flagRO
	return Value{ptrTo(v.typ()), v.ptr, fl | flag(Pointer)}
}

// Bool returns v's underlying value.
// It panics if v's kind is not [Bool].
func (v Value) Bool() bool {
	// panicNotBool is split out to keep Bool inlineable.
	if v.kind() != Bool {
		v.panicNotBool()
	}
	return *(*bool)(v.ptr)
}

func (v Value) panicNotBool() {
	v.mustBe(Bool)
}

var bytesType = rtypeOf(([]byte)(nil))

// Bytes returns v's underlying value.
// It panics if v's underlying value is not a slice of bytes or
// an addressable array of bytes.
func (v Value) Bytes() []byte {
	// bytesSlow is split out to keep Bytes inlineable for unnamed []byte.
	if v.typ_ == bytesType { // ok to use v.typ_ directly as comparison doesn't cause escape
		return *(*[]byte)(v.ptr)
	}
	return v.bytesSlow()
}

func (v Value) bytesSlow() []byte {
	switch v.kind() {
	case Slice:
		if v.typ().Elem().Kind() != abi.Uint8 {
			panic("reflect.Value.Bytes of non-byte slice")
		}
		// Slice is always bigger than a word; assume flagIndir.
		return *(*[]byte)(v.ptr)
	case Array:
		if v.typ().Elem().Kind() != abi.Uint8 {
			panic("reflect.Value.Bytes of non-byte array")
		}
		if !v.CanAddr() {
			panic("reflect.Value.Bytes of unaddressable byte array")
		}
		p := (*byte)(v.ptr)
		n := int((*arrayType)(unsafe.Pointer(v.typ())).Len)
		return unsafe.Slice(p, n)
	}
	panic(&ValueError{"reflect.Value.Bytes", v.kind()})
}

// runes returns v's underlying value.
// It panics if v's underlying value is not a slice of runes (int32s).
func (v Value) runes() []rune {
	v.mustBe(Slice)
	if v.typ().Elem().Kind() != abi.Int32 {
		panic("reflect.Value.runes of non-rune slice")
	}
	// Slice is always bigger than a word; assume flagIndir.
	return *(*[]rune)(v.ptr)
}

// CanAddr reports whether the value's address can be obtained with [Value.Addr].
// Such values are called addressable. A value is addressable if it is
// an element of a slice, an element of an addressable array,
// a field of an addressable struct, or the result of dereferencing a pointer.
// If CanAddr returns false, calling [Value.Addr] will panic.
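//
// For example (a sketch from a client package):
//
//	x := 0
//	fmt.Println(reflect.ValueOf(x).CanAddr())         // false: a copy of x
//	fmt.Println(reflect.ValueOf(&x).Elem().CanAddr()) // true: refers to x itself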
func (v Value) CanAddr() bool {
	return v.flag&flagAddr != 0
}

// CanSet reports whether the value of v can be changed.
// A [Value] can be changed only if it is addressable and was not
// obtained by the use of unexported struct fields.
// If CanSet returns false, calling [Value.Set] or any type-specific
// setter (e.g., [Value.SetBool], [Value.SetInt]) will panic.
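//
// A minimal sketch (from a client package, assuming a hypothetical struct
// type T with an exported field N and an unexported field s):
//
//	v := reflect.ValueOf(&T{}).Elem()
//	fmt.Println(v.FieldByName("N").CanSet()) // true: exported and addressable
//	fmt.Println(v.FieldByName("s").CanSet()) // false: unexported field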
func (v Value) CanSet() bool {
	return v.flag&(flagAddr|flagRO) == flagAddr
}

// Call calls the function v with the input arguments in.
// For example, if len(in) == 3, v.Call(in) represents the Go call v(in[0], in[1], in[2]).
// Call panics if v's Kind is not [Func].
// It returns the output results as Values.
// As in Go, each input argument must be assignable to the
// type of the function's corresponding input parameter.
// If v is a variadic function, Call creates the variadic slice parameter
// itself, copying in the corresponding values.
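//
// A minimal sketch (from a client package):
//
//	f := reflect.ValueOf(strings.Repeat)
//	in := []reflect.Value{reflect.ValueOf("ab"), reflect.ValueOf(3)}
//	out := f.Call(in)
//	fmt.Println(out[0].String()) // "ababab"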
func (v Value) Call(in []Value) []Value {
	v.mustBe(Func)
	v.mustBeExported()
	return v.call("Call", in)
}

// CallSlice calls the variadic function v with the input arguments in,
// assigning the slice in[len(in)-1] to v's final variadic argument.
// For example, if len(in) == 3, v.CallSlice(in) represents the Go call v(in[0], in[1], in[2]...).
// CallSlice panics if v's Kind is not [Func] or if v is not variadic.
// It returns the output results as Values.
// As in Go, each input argument must be assignable to the
// type of the function's corresponding input parameter.
func (v Value) CallSlice(in []Value) []Value {
	v.mustBe(Func)
	v.mustBeExported()
	return v.call("CallSlice", in)
}

var callGC bool // for testing; see TestCallMethodJump and TestCallArgLive

const debugReflectCall = false

func ( Value) ( string,  []Value) []Value {
	// Get function pointer, type.
	 := (*funcType)(unsafe.Pointer(.typ()))
	var (
		       unsafe.Pointer
		     Value
		 *abi.Type
	)
	if .flag&flagMethod != 0 {
		 = 
		, ,  = methodReceiver(, , int(.flag)>>flagMethodShift)
	} else if .flag&flagIndir != 0 {
		 = *(*unsafe.Pointer)(.ptr)
	} else {
		 = .ptr
	}

	if  == nil {
		panic("reflect.Value.Call: call of nil function")
	}

	 :=  == "CallSlice"
	 := .NumIn()
	 := .IsVariadic()
	if  {
		if ! {
			panic("reflect: CallSlice of non-variadic function")
		}
		if len() <  {
			panic("reflect: CallSlice with too few input arguments")
		}
		if len() >  {
			panic("reflect: CallSlice with too many input arguments")
		}
	} else {
		if  {
			--
		}
		if len() <  {
			panic("reflect: Call with too few input arguments")
		}
		if ! && len() >  {
			panic("reflect: Call with too many input arguments")
		}
	}
	for ,  := range  {
		if .Kind() == Invalid {
			panic("reflect: " +  + " using zero Value argument")
		}
	}
	for  := 0;  < ; ++ {
		if ,  := [].Type(), .In(); !.AssignableTo(toRType()) {
			panic("reflect: " +  + " using " + .String() + " as type " + stringFor())
		}
	}
	if ! &&  {
		// prepare slice for remaining values
		 := len() - 
		 := MakeSlice(toRType(.In()), , )
		 := toRType(.In()).Elem() // FIXME cast to slice type and Elem()
		for  := 0;  < ; ++ {
			 := [+]
			if  := .Type(); !.AssignableTo() {
				panic("reflect: cannot use " + .String() + " as type " + .String() + " in " + )
			}
			.Index().Set()
		}
		 := 
		 = make([]Value, +1)
		copy([:], )
		[] = 
	}

	 := len()
	if  != .NumIn() {
		panic("reflect.Value.Call: wrong argument count")
	}
	 := .NumOut()

	// Register argument space.
	var  abi.RegArgs

	// Compute frame type.
	, ,  := funcLayout(, )

	// Allocate a chunk of memory for frame if needed.
	var  unsafe.Pointer
	if .Size() != 0 {
		if  == 0 {
			 = .Get().(unsafe.Pointer)
		} else {
			// Can't use pool if the function has return values.
			// We will leak pointer to args in ret, so its lifetime is not scoped.
			 = unsafe_New()
		}
	}
	 := .Size()

	if debugReflectCall {
		println("reflect.call", stringFor(&.Type))
		.dump()
	}

	// Copy inputs into args.

	// Handle receiver.
	 := 0
	if  != nil {
		// Guaranteed to only be one word in size,
		// so it will only take up exactly 1 abiStep (either
		// in a register or on the stack).
		switch  := .call.steps[0]; .kind {
		case abiStepStack:
			storeRcvr(, )
		case abiStepPointer:
			storeRcvr(, unsafe.Pointer(&.Ptrs[.ireg]))
			fallthrough
		case abiStepIntReg:
			storeRcvr(, unsafe.Pointer(&.Ints[.ireg]))
		case abiStepFloatReg:
			storeRcvr(, unsafe.Pointer(&.Floats[.freg]))
		default:
			panic("unknown ABI parameter kind")
		}
		 = 1
	}

	// Handle arguments.
	for ,  := range  {
		.mustBeExported()
		 := toRType(.In())
		// TODO(mknyszek): Figure out if it's possible to get some
		// scratch space for this assignment check. Previously, it
		// was possible to use space in the argument frame.
		 = .assignTo("reflect.Value.Call", &.t, nil)
	:
		for ,  := range .call.stepsForValue( + ) {
			switch .kind {
			case abiStepStack:
				// Copy values to the "stack."
				 := add(, .stkOff, "precomputed stack arg offset")
				if .flag&flagIndir != 0 {
					typedmemmove(&.t, , .ptr)
				} else {
					*(*unsafe.Pointer)() = .ptr
				}
				// There's only one step for a stack-allocated value.
				break 
			case abiStepIntReg, abiStepPointer:
				// Copy values to "integer registers."
				if .flag&flagIndir != 0 {
					 := add(.ptr, .offset, "precomputed value offset")
					if .kind == abiStepPointer {
						// Duplicate this pointer in the pointer area of the
						// register space. Otherwise, there's the potential for
						// this to be the last reference to v.ptr.
						.Ptrs[.ireg] = *(*unsafe.Pointer)()
					}
					intToReg(&, .ireg, .size, )
				} else {
					if .kind == abiStepPointer {
						// See the comment in abiStepPointer case above.
						.Ptrs[.ireg] = .ptr
					}
					.Ints[.ireg] = uintptr(.ptr)
				}
			case abiStepFloatReg:
				// Copy values to "float registers."
				if .flag&flagIndir == 0 {
					panic("attempted to copy pointer to FP register")
				}
				 := add(.ptr, .offset, "precomputed value offset")
				floatToReg(&, .freg, .size, )
			default:
				panic("unknown ABI part kind")
			}
		}
	}
	// TODO(mknyszek): Remove this when we no longer have
	// caller reserved spill space.
	 = align(, goarch.PtrSize)
	 += .spill

	// Mark pointers in registers for the return path.
	.ReturnIsPtr = .outRegPtrs

	if debugReflectCall {
		.Dump()
	}

	// For testing; see TestCallArgLive.
	if callGC {
		runtime.GC()
	}

	// Call.
	call(, , , uint32(.Size()), uint32(.retOffset), uint32(), &)

	// For testing; see TestCallMethodJump.
	if callGC {
		runtime.GC()
	}

	var  []Value
	if  == 0 {
		if  != nil {
			typedmemclr(, )
			.Put()
		}
	} else {
		if  != nil {
			// Zero the now unused input area of args,
			// because the Values returned by this function contain pointers to the args object,
			// and will thus keep the args object alive indefinitely.
			typedmemclrpartial(, , 0, .retOffset)
		}

		// Wrap Values around return values in args.
		 = make([]Value, )
		for  := 0;  < ; ++ {
			 := .Out()
			if .Size() == 0 {
				// For zero-sized return value, args+off may point to the next object.
				// In this case, return the zero value instead.
				[] = Zero(toRType())
				continue
			}
			 := .ret.stepsForValue()
			if  := [0]; .kind == abiStepStack {
				// This value is on the stack. If part of a value is stack
				// allocated, the entire value is according to the ABI. So
				// just make an indirection into the allocated frame.
				 := flagIndir | flag(.Kind())
				[] = Value{, add(, .stkOff, "tv.Size() != 0"), }
				// Note: this does introduce false sharing between results -
				// if any result is live, they are all live.
				// (And the space for the args is live as well, but as we've
				// cleared that space it isn't as big a deal.)
				continue
			}

			// Handle pointers passed in registers.
			if !.IfaceIndir() {
				// Pointer-valued data gets put directly
				// into v.ptr.
				if [0].kind != abiStepPointer {
					print("kind=", [0].kind, ", type=", stringFor(), "\n")
					panic("mismatch between ABI description and types")
				}
				[] = Value{, .Ptrs[[0].ireg], flag(.Kind())}
				continue
			}

			// All that's left is values passed in registers that we need to
			// create space for and copy values back into.
			//
			// TODO(mknyszek): We make a new allocation for each register-allocated
			// value, but previously we could always point into the heap-allocated
			// stack frame. This is a regression that could be fixed by adding
			// additional space to the allocated stack frame and storing the
			// register-allocated return values into the allocated stack frame and
			// referring there in the resulting Value.
			 := unsafe_New()
			for ,  := range  {
				switch .kind {
				case abiStepIntReg:
					 := add(, .offset, "precomputed value offset")
					intFromReg(&, .ireg, .size, )
				case abiStepPointer:
					 := add(, .offset, "precomputed value offset")
					*((*unsafe.Pointer)()) = .Ptrs[.ireg]
				case abiStepFloatReg:
					 := add(, .offset, "precomputed value offset")
					floatFromReg(&, .freg, .size, )
				case abiStepStack:
					panic("register-based return value has stack component")
				default:
					panic("unknown ABI part kind")
				}
			}
			[] = Value{, , flagIndir | flag(.Kind())}
		}
	}

	return 
}

// callReflect is the call implementation used by a function
// returned by MakeFunc. In many ways it is the opposite of the
// method Value.call above. The method above converts a call using Values
// into a call of a function with a concrete argument frame, while
// callReflect converts a call of a function with a concrete argument
// frame into a call using Values.
// It is in this file so that it can be next to the call method above.
// The remainder of the MakeFunc implementation is in makefunc.go.
//
// NOTE: This function must be marked as a "wrapper" in the generated code,
// so that the linker can make it work correctly for panic and recover.
// The gc compilers know to do that for the name "reflect.callReflect".
//
// ctxt is the "closure" generated by MakeFunc.
// frame is a pointer to the arguments to that closure on the stack.
// retValid points to a boolean which should be set when the results
// section of frame is set.
//
// regs contains the argument values passed in registers and will contain
// the values returned from ctxt.fn in registers.
func callReflect( *makeFuncImpl,  unsafe.Pointer,  *bool,  *abi.RegArgs) {
	if callGC {
		// Call GC upon entry during testing.
		// Getting our stack scanned here is the biggest hazard, because
		// our caller (makeFuncStub) could have failed to place the last
		// pointer to a value in regs' pointer space, in which case it
		// won't be visible to the GC.
		runtime.GC()
	}
	 := .ftyp
	 := .fn

	, ,  := funcLayout(, nil)

	// Copy arguments into Values.
	 := 
	 := make([]Value, 0, int(.InCount))
	for ,  := range .InSlice() {
		if .Size() == 0 {
			 = append(, Zero(toRType()))
			continue
		}
		 := Value{, nil, flag(.Kind())}
		 := .call.stepsForValue()
		if  := [0]; .kind == abiStepStack {
			if .IfaceIndir() {
				// value cannot be inlined in interface data.
				// Must make a copy, because f might keep a reference to it,
				// and we cannot let f keep a reference to the stack frame
				// after this function returns, not even a read-only reference.
				.ptr = unsafe_New()
				if .Size() > 0 {
					typedmemmove(, .ptr, add(, .stkOff, "typ.size > 0"))
				}
				.flag |= flagIndir
			} else {
				.ptr = *(*unsafe.Pointer)(add(, .stkOff, "1-ptr"))
			}
		} else {
			if .IfaceIndir() {
				// All that's left is values passed in registers that we need to
				// create space for the values.
				.flag |= flagIndir
				.ptr = unsafe_New()
				for ,  := range  {
					switch .kind {
					case abiStepIntReg:
						 := add(.ptr, .offset, "precomputed value offset")
						intFromReg(, .ireg, .size, )
					case abiStepPointer:
						 := add(.ptr, .offset, "precomputed value offset")
						*((*unsafe.Pointer)()) = .Ptrs[.ireg]
					case abiStepFloatReg:
						 := add(.ptr, .offset, "precomputed value offset")
						floatFromReg(, .freg, .size, )
					case abiStepStack:
						panic("register-based return value has stack component")
					default:
						panic("unknown ABI part kind")
					}
				}
			} else {
				// Pointer-valued data gets put directly
				// into v.ptr.
				if [0].kind != abiStepPointer {
					print("kind=", [0].kind, ", type=", stringFor(), "\n")
					panic("mismatch between ABI description and types")
				}
				.ptr = .Ptrs[[0].ireg]
			}
		}
		 = append(, )
	}

	// Call underlying function.
	 := ()
	 := .NumOut()
	if len() !=  {
		panic("reflect: wrong return count from function created by MakeFunc")
	}

	// Copy results back into argument frame and register space.
	if  > 0 {
		for ,  := range .OutSlice() {
			 := []
			if .typ() == nil {
				panic("reflect: function created by MakeFunc using " + funcName() +
					" returned zero Value")
			}
			if .flag&flagRO != 0 {
				panic("reflect: function created by MakeFunc using " + funcName() +
					" returned value obtained from unexported field")
			}
			if .Size() == 0 {
				continue
			}

			// Convert v to type typ if v is assignable to a variable
			// of type t in the language spec.
			// See issue 28761.
			//
			//
			// TODO(mknyszek): In the switch to the register ABI we lost
			// the scratch space here for the register cases (and
			// temporarily for all the cases).
			//
			// If/when this happens, take note of the following:
			//
			// We must clear the destination before calling assignTo,
			// in case assignTo writes (with memory barriers) to the
			// target location used as scratch space. See issue 39541.
			 = .assignTo("reflect.MakeFunc", , nil)
		:
			for ,  := range .ret.stepsForValue() {
				switch .kind {
				case abiStepStack:
					// Copy values to the "stack."
					 := add(, .stkOff, "precomputed stack arg offset")
					// Do not use write barriers. The stack space used
					// for this call is not adequately zeroed, and we
					// are careful to keep the arguments alive until we
					// return to makeFuncStub's caller.
					if .flag&flagIndir != 0 {
						memmove(, .ptr, .size)
					} else {
						// This case must be a pointer type.
						*(*uintptr)() = uintptr(.ptr)
					}
					// There's only one step for a stack-allocated value.
					break 
				case abiStepIntReg, abiStepPointer:
					// Copy values to "integer registers."
					if .flag&flagIndir != 0 {
						 := add(.ptr, .offset, "precomputed value offset")
						intToReg(, .ireg, .size, )
					} else {
						// Only populate the Ints space on the return path.
						// This is safe because out is kept alive until the
						// end of this function, and the return path through
						// makeFuncStub has no preemption, so these pointers
						// are always visible to the GC.
						.Ints[.ireg] = uintptr(.ptr)
					}
				case abiStepFloatReg:
					// Copy values to "float registers."
					if .flag&flagIndir == 0 {
						panic("attempted to copy pointer to FP register")
					}
					 := add(.ptr, .offset, "precomputed value offset")
					floatToReg(, .freg, .size, )
				default:
					panic("unknown ABI part kind")
				}
			}
		}
	}

	// Announce that the return values are valid.
	// After this point the runtime can depend on the return values being valid.
	* = true

	// We have to make sure that the out slice lives at least until
	// the runtime knows the return values are valid. Otherwise, the
	// return values might not be scanned by anyone during a GC.
	// (out would be dead, and the return slots not yet alive.)
	runtime.KeepAlive()

	// runtime.getArgInfo expects to be able to find ctxt on the
	// stack when it finds our caller, makeFuncStub. Make sure it
	// doesn't get garbage collected.
	runtime.KeepAlive()
}

// methodReceiver returns information about the receiver
// described by v. The Value v may or may not have the
// flagMethod bit set, so the kind cached in v.flag should
// not be used.
// The return value rcvrtype gives the method's actual receiver type.
// The return value t gives the method type signature (without the receiver).
// The return value fn is a pointer to the method code.
func methodReceiver(op string, v Value, methodIndex int) (rcvrtype *abi.Type, t *funcType, fn unsafe.Pointer) {
	i := methodIndex
	if v.typ().Kind() == abi.Interface {
		tt := (*interfaceType)(unsafe.Pointer(v.typ()))
		if uint(i) >= uint(len(tt.Methods)) {
			panic("reflect: internal error: invalid method index")
		}
		m := &tt.Methods[i]
		if !tt.nameOff(m.Name).IsExported() {
			panic("reflect: " + op + " of unexported method")
		}
		iface := (*nonEmptyInterface)(v.ptr)
		if iface.itab == nil {
			panic("reflect: " + op + " of method on nil interface value")
		}
		rcvrtype = iface.itab.Type
		fn = unsafe.Pointer(&unsafe.Slice(&iface.itab.Fun[0], i+1)[i])
		t = (*funcType)(unsafe.Pointer(tt.typeOff(m.Typ)))
	} else {
		rcvrtype = v.typ()
		ms := v.typ().ExportedMethods()
		if uint(i) >= uint(len(ms)) {
			panic("reflect: internal error: invalid method index")
		}
		m := ms[i]
		if !nameOffFor(v.typ(), m.Name).IsExported() {
			panic("reflect: " + op + " of unexported method")
		}
		ifn := textOffFor(v.typ(), m.Ifn)
		fn = unsafe.Pointer(&ifn)
		t = (*funcType)(unsafe.Pointer(typeOffFor(v.typ(), m.Mtyp)))
	}
	return
}

// v is a method receiver. Store at p the word which is used to
// encode that receiver at the start of the argument list.
// Reflect uses the "interface" calling convention for
// methods, which always uses one word to record the receiver.
func storeRcvr(v Value, p unsafe.Pointer) {
	t := v.typ()
	if t.Kind() == abi.Interface {
		// the interface data word becomes the receiver word
		iface := (*nonEmptyInterface)(v.ptr)
		*(*unsafe.Pointer)(p) = iface.word
	} else if v.flag&flagIndir != 0 && !t.IfaceIndir() {
		*(*unsafe.Pointer)(p) = *(*unsafe.Pointer)(v.ptr)
	} else {
		*(*unsafe.Pointer)(p) = v.ptr
	}
}

// align returns the result of rounding x up to a multiple of n.
// n must be a power of two.
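// For example, align(13, 8) == 16 and align(16, 8) == 16.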
func align(x, n uintptr) uintptr {
	return (x + n - 1) &^ (n - 1)
}

// callMethod is the call implementation used by a function returned
// by makeMethodValue (used by v.Method(i).Interface()).
// It is a streamlined version of the usual reflect call: the caller has
// already laid out the argument frame for us, so we don't have
// to deal with individual Values for each argument.
// It is in this file so that it can be next to the two similar functions above.
// The remainder of the makeMethodValue implementation is in makefunc.go.
//
// NOTE: This function must be marked as a "wrapper" in the generated code,
// so that the linker can make it work correctly for panic and recover.
// The gc compilers know to do that for the name "reflect.callMethod".
//
// ctxt is the "closure" generated by makeMethodValue.
// frame is a pointer to the arguments to that closure on the stack.
// retValid points to a boolean which should be set when the results
// section of frame is set.
//
// regs contains the argument values passed in registers and will contain
// the values returned from ctxt.fn in registers.
func callMethod( *methodValue,  unsafe.Pointer,  *bool,  *abi.RegArgs) {
	 := .rcvr
	, ,  := methodReceiver("call", , .method)

	// There are two ABIs at play here.
	//
	// methodValueCall was invoked with the ABI assuming there was no
	// receiver ("value ABI") and that's what frame and regs are holding.
	//
	// Meanwhile, we need to actually call the method with a receiver, which
	// has its own ABI ("method ABI"). Everything that follows is a translation
	// between the two.
	, ,  := funcLayout(, nil)
	,  := , 
	, ,  := funcLayout(, )

	// Make a new frame that is one word bigger so we can store the receiver.
	// This space is used for both arguments and return values.
	 := .Get().(unsafe.Pointer)
	var  abi.RegArgs

	// Deal with the receiver. It's guaranteed to only be one word in size.
	switch  := .call.steps[0]; .kind {
	case abiStepStack:
		// Only copy the receiver to the stack if the ABI says so.
		// Otherwise, it'll be in a register already.
		storeRcvr(, )
	case abiStepPointer:
		// Put the receiver in a register.
		storeRcvr(, unsafe.Pointer(&.Ptrs[.ireg]))
		fallthrough
	case abiStepIntReg:
		storeRcvr(, unsafe.Pointer(&.Ints[.ireg]))
	case abiStepFloatReg:
		storeRcvr(, unsafe.Pointer(&.Floats[.freg]))
	default:
		panic("unknown ABI parameter kind")
	}

	// Translate the rest of the arguments.
	for ,  := range .InSlice() {
		 := .call.stepsForValue()
		 := .call.stepsForValue( + 1)

		// Zero-sized types are trivial: nothing to do.
		if len() == 0 {
			if len() != 0 {
				panic("method ABI and value ABI do not align")
			}
			continue
		}

		// There are four cases to handle in translating each
		// argument:
		// 1. Stack -> stack translation.
		// 2. Stack -> registers translation.
		// 3. Registers -> stack translation.
		// 4. Registers -> registers translation.

		// If the value ABI passes the value on the stack,
		// then the method ABI does too, because it has strictly
		// fewer arguments. Simply copy between the two.
		if  := [0]; .kind == abiStepStack {
			 := [0]
			// Handle stack -> stack translation.
			if .kind == abiStepStack {
				if .size != .size {
					panic("method ABI and value ABI do not align")
				}
				typedmemmove(,
					add(, .stkOff, "precomputed stack offset"),
					add(, .stkOff, "precomputed stack offset"))
				continue
			}
			// Handle stack -> register translation.
			for ,  := range  {
				 := add(, .stkOff+.offset, "precomputed stack offset")
				switch .kind {
				case abiStepPointer:
					// Do the pointer copy directly so we get a write barrier.
					.Ptrs[.ireg] = *(*unsafe.Pointer)()
					fallthrough // We need to make sure this ends up in Ints, too.
				case abiStepIntReg:
					intToReg(&, .ireg, .size, )
				case abiStepFloatReg:
					floatToReg(&, .freg, .size, )
				default:
					panic("unexpected method step")
				}
			}
			continue
		}
		// Handle register -> stack translation.
		if  := [0]; .kind == abiStepStack {
			for ,  := range  {
				 := add(, .stkOff+.offset, "precomputed stack offset")
				switch .kind {
				case abiStepPointer:
					// Do the pointer copy directly so we get a write barrier.
					*(*unsafe.Pointer)() = .Ptrs[.ireg]
				case abiStepIntReg:
					intFromReg(, .ireg, .size, )
				case abiStepFloatReg:
					floatFromReg(, .freg, .size, )
				default:
					panic("unexpected value step")
				}
			}
			continue
		}
		// Handle register -> register translation.
		if len() != len() {
			// Because it's the same type for the value, and it's assigned
			// to registers both times, it should always take up the same
			// number of registers for each ABI.
			panic("method ABI and value ABI don't align")
		}
		for ,  := range  {
			 := []
			if .kind != .kind {
				panic("method ABI and value ABI don't align")
			}
			switch .kind {
			case abiStepPointer:
				// Copy this too, so we get a write barrier.
				.Ptrs[.ireg] = .Ptrs[.ireg]
				fallthrough
			case abiStepIntReg:
				.Ints[.ireg] = .Ints[.ireg]
			case abiStepFloatReg:
				.Floats[.freg] = .Floats[.freg]
			default:
				panic("unexpected value step")
			}
		}
	}

	 := .Size()
	// TODO(mknyszek): Remove this when we no longer have
	// caller reserved spill space.
	 = align(, goarch.PtrSize)
	 += .spill

	// Mark pointers in registers for the return path.
	.ReturnIsPtr = .outRegPtrs

	// Call.
	// Call copies the arguments from scratch to the stack, calls fn,
	// and then copies the results back into scratch.
	call(, , , uint32(.Size()), uint32(.retOffset), uint32(), &)

	// Copy return values.
	//
	// This is somewhat simpler because both ABIs have an identical
	// return value ABI (the types are identical). As a result, register
	// results can simply be copied over. Stack-allocated values are laid
	// out the same, but are at different offsets from the start of the frame,
	// because the arguments may be laid out differently.
	// Ignore any changes to args.
	// Avoid constructing out-of-bounds pointers if there are no return values.
	if  != nil {
		* = 
	}
	if  := .Size() - .retOffset;  > 0 {
		 := add(, .retOffset, "valueFrame's size > retOffset")
		 := add(, .retOffset, "methodFrame's size > retOffset")
		// This copies to the stack. Write barriers are not needed.
		memmove(, , )
	}

	// Tell the runtime it can now depend on the return values
	// being properly initialized.
	* = true

	// Clear the scratch space and put it back in the pool.
	// This must happen after the statement above, so that the return
	// values will always be scanned by someone.
	typedmemclr(, )
	.Put()

	// See the comment in callReflect.
	runtime.KeepAlive()

	// Keep valueRegs alive because it may hold live pointer results.
	// The caller (methodValueCall) has it as a stack object, which is only
	// scanned when there is a reference to it.
	runtime.KeepAlive()
}

// funcName returns the name of f, for use in error messages.
func funcName(f func([]Value) []Value) string {
	pc := *(*uintptr)(unsafe.Pointer(&f))
	rf := runtime.FuncForPC(pc)
	if rf != nil {
		return rf.Name()
	}
	return "closure"
}

// Cap returns v's capacity.
// It panics if v's Kind is not [Array], [Chan], [Slice] or pointer to [Array].
func (v Value) Cap() int {
	// capNonSlice is split out to keep Cap inlineable for slice kinds.
	if v.kind() == Slice {
		return (*unsafeheader.Slice)(v.ptr).Cap
	}
	return v.capNonSlice()
}

func (v Value) capNonSlice() int {
	k := v.kind()
	switch k {
	case Array:
		return v.typ().Len()
	case Chan:
		return chancap(v.pointer())
	case Ptr:
		if v.typ().Elem().Kind() == abi.Array {
			return v.typ().Elem().Len()
		}
		panic("reflect: call of reflect.Value.Cap on ptr to non-array Value")
	}
	panic(&ValueError{"reflect.Value.Cap", v.kind()})
}

// Close closes the channel v.
// It panics if v's Kind is not [Chan] or
// v is a receive-only channel.
func (v Value) Close() {
	v.mustBe(Chan)
	v.mustBeExported()
	tt := (*chanType)(unsafe.Pointer(v.typ()))
	if ChanDir(tt.Dir)&SendDir == 0 {
		panic("reflect: close of receive-only channel")
	}

	chanclose(v.pointer())
}

// CanComplex reports whether [Value.Complex] can be used without panicking.
func (v Value) CanComplex() bool {
	switch v.kind() {
	case Complex64, Complex128:
		return true
	default:
		return false
	}
}

// Complex returns v's underlying value, as a complex128.
// It panics if v's Kind is not [Complex64] or [Complex128].
func (v Value) Complex() complex128 {
	k := v.kind()
	switch k {
	case Complex64:
		return complex128(*(*complex64)(v.ptr))
	case Complex128:
		return *(*complex128)(v.ptr)
	}
	panic(&ValueError{"reflect.Value.Complex", v.kind()})
}

// Elem returns the value that the interface v contains
// or that the pointer v points to.
// It panics if v's Kind is not [Interface] or [Pointer].
// It returns the zero Value if v is nil.
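//
// For example (a sketch from a client package):
//
//	x := 7
//	p := reflect.ValueOf(&x)    // Kind Pointer
//	fmt.Println(p.Elem().Int()) // 7: the value p points to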
func (v Value) Elem() Value {
	k := v.kind()
	switch k {
	case Interface:
		var eface any
		if v.typ().NumMethod() == 0 {
			eface = *(*any)(v.ptr)
		} else {
			eface = (any)(*(*interface {
				M()
			})(v.ptr))
		}
		x := unpackEface(eface)
		if x.flag != 0 {
			x.flag |= v.flag.ro()
		}
		return x
	case Pointer:
		ptr := v.ptr
		if v.flag&flagIndir != 0 {
			if v.typ().IfaceIndir() {
				// This is a pointer to a not-in-heap object. ptr points to a uintptr
				// in the heap. That uintptr is the address of a not-in-heap object.
				// In general, pointers to not-in-heap objects can be total junk.
				// But Elem() is asking to dereference it, so the user has asserted
				// that at least it is a valid pointer (not just an integer stored in
				// a pointer slot). So let's check, to make sure that it isn't a pointer
				// that the runtime will crash on if it sees it during GC or write barriers.
				// Since it is a not-in-heap pointer, all pointers to the heap are
				// forbidden! That makes the test pretty easy.
				// See issue 48399.
				if !verifyNotInHeapPtr(*(*uintptr)(ptr)) {
					panic("reflect: reflect.Value.Elem on an invalid notinheap pointer")
				}
			}
			ptr = *(*unsafe.Pointer)(ptr)
		}
		// The returned value's address is v's value.
		if ptr == nil {
			return Value{}
		}
		tt := (*ptrType)(unsafe.Pointer(v.typ()))
		typ := tt.Elem
		fl := v.flag&flagRO | flagIndir | flagAddr
		fl |= flag(typ.Kind())
		return Value{typ, ptr, fl}
	}
	panic(&ValueError{"reflect.Value.Elem", v.kind()})
}

// Field returns the i'th field of the struct v.
// It panics if v's Kind is not [Struct] or i is out of range.
func (v Value) Field(i int) Value {
	if v.kind() != Struct {
		panic(&ValueError{"reflect.Value.Field", v.kind()})
	}
	tt := (*structType)(unsafe.Pointer(v.typ()))
	if uint(i) >= uint(len(tt.Fields)) {
		panic("reflect: Field index out of range")
	}
	field := &tt.Fields[i]
	typ := field.Typ

	// Inherit permission bits from v, but clear flagEmbedRO.
	fl := v.flag&(flagStickyRO|flagIndir|flagAddr) | flag(typ.Kind())
	// Using an unexported field forces flagRO.
	if !field.Name.IsExported() {
		if field.Embedded() {
			fl |= flagEmbedRO
		} else {
			fl |= flagStickyRO
		}
	}
	// Either flagIndir is set and v.ptr points at struct,
	// or flagIndir is not set and v.ptr is the actual struct data.
	// In the former case, we want v.ptr + offset.
	// In the latter case, we must have field.offset = 0,
	// so v.ptr + field.offset is still the correct address.
	ptr := add(v.ptr, field.Offset, "same as non-reflect &v.field")
	return Value{typ, ptr, fl}
}

// FieldByIndex returns the nested field corresponding to index.
// It panics if evaluation requires stepping through a nil
// pointer or a field that is not a struct.
func (v Value) FieldByIndex(index []int) Value {
	if len(index) == 1 {
		return v.Field(index[0])
	}
	v.mustBe(Struct)
	for i, x := range index {
		if i > 0 {
			if v.Kind() == Pointer && v.typ().Elem().Kind() == abi.Struct {
				if v.IsNil() {
					panic("reflect: indirection through nil pointer to embedded struct")
				}
				v = v.Elem()
			}
		}
		v = v.Field(x)
	}
	return v
}

// FieldByIndexErr returns the nested field corresponding to index.
// It returns an error if evaluation requires stepping through a nil
// pointer, but panics if it must step through a field that
// is not a struct.
func (v Value) FieldByIndexErr(index []int) (Value, error) {
	if len(index) == 1 {
		return v.Field(index[0]), nil
	}
	v.mustBe(Struct)
	for i, x := range index {
		if i > 0 {
			if v.Kind() == Ptr && v.typ().Elem().Kind() == abi.Struct {
				if v.IsNil() {
					return Value{}, errors.New("reflect: indirection through nil pointer to embedded struct field " + nameFor(v.typ().Elem()))
				}
				v = v.Elem()
			}
		}
		v = v.Field(x)
	}
	return v, nil
}

// FieldByName returns the struct field with the given name.
// It returns the zero Value if no field was found.
// It panics if v's Kind is not [Struct].
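//
// A minimal sketch (from a client package, assuming a hypothetical struct
// type User with an exported field Name):
//
//	u := User{Name: "gopher"}
//	v := reflect.ValueOf(u)
//	fmt.Println(v.FieldByName("Name").String()) // "gopher"
//	fmt.Println(v.FieldByName("Age").IsValid()) // false: no such field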
func (v Value) FieldByName(name string) Value {
	v.mustBe(Struct)
	if f, ok := toRType(v.typ()).FieldByName(name); ok {
		return v.FieldByIndex(f.Index)
	}
	return Value{}
}

// FieldByNameFunc returns the struct field with a name
// that satisfies the match function.
// It panics if v's Kind is not [Struct].
// It returns the zero Value if no field was found.
func (v Value) FieldByNameFunc(match func(string) bool) Value {
	if f, ok := toRType(v.typ()).FieldByNameFunc(match); ok {
		return v.FieldByIndex(f.Index)
	}
	return Value{}
}

// CanFloat reports whether [Value.Float] can be used without panicking.
func (v Value) CanFloat() bool {
	switch v.kind() {
	case Float32, Float64:
		return true
	default:
		return false
	}
}

// Float returns v's underlying value, as a float64.
// It panics if v's Kind is not [Float32] or [Float64].
func (v Value) Float() float64 {
	k := v.kind()
	switch k {
	case Float32:
		return float64(*(*float32)(v.ptr))
	case Float64:
		return *(*float64)(v.ptr)
	}
	panic(&ValueError{"reflect.Value.Float", v.kind()})
}

var uint8Type = rtypeOf(uint8(0))

// Index returns v's i'th element.
// It panics if v's Kind is not [Array], [Slice], or [String] or i is out of range.
func (v Value) Index(i int) Value {
	switch v.kind() {
	case Array:
		tt := (*arrayType)(unsafe.Pointer(v.typ()))
		if uint(i) >= uint(tt.Len) {
			panic("reflect: array index out of range")
		}
		typ := tt.Elem
		offset := uintptr(i) * typ.Size()

		// Either flagIndir is set and v.ptr points at array,
		// or flagIndir is not set and v.ptr is the actual array data.
		// In the former case, we want v.ptr + offset.
		// In the latter case, we must be doing Index(0), so offset = 0,
		// so v.ptr + offset is still the correct address.
		val := add(v.ptr, offset, "same as &v[i], i < tt.len")
		fl := v.flag&(flagIndir|flagAddr) | v.flag.ro() | flag(typ.Kind()) // bits same as overall array
		return Value{typ, val, fl}

	case Slice:
		// Element flag same as Elem of Pointer.
		// Addressable, indirect, possibly read-only.
		s := (*unsafeheader.Slice)(v.ptr)
		if uint(i) >= uint(s.Len) {
			panic("reflect: slice index out of range")
		}
		tt := (*sliceType)(unsafe.Pointer(v.typ()))
		typ := tt.Elem
		val := arrayAt(s.Data, i, typ.Size(), "i < s.Len")
		fl := flagAddr | flagIndir | v.flag.ro() | flag(typ.Kind())
		return Value{typ, val, fl}

	case String:
		s := (*unsafeheader.String)(v.ptr)
		if uint(i) >= uint(s.Len) {
			panic("reflect: string index out of range")
		}
		p := arrayAt(s.Data, i, 1, "i < s.Len")
		fl := v.flag.ro() | flag(Uint8) | flagIndir
		return Value{uint8Type, p, fl}
	}
	panic(&ValueError{"reflect.Value.Index", v.kind()})
}

// CanInt reports whether Int can be used without panicking.
func (v Value) CanInt() bool {
	switch v.kind() {
	case Int, Int8, Int16, Int32, Int64:
		return true
	default:
		return false
	}
}

// Int returns v's underlying value, as an int64.
// It panics if v's Kind is not [Int], [Int8], [Int16], [Int32], or [Int64].
func (v Value) Int() int64 {
	k := v.kind()
	p := v.ptr
	switch k {
	case Int:
		return int64(*(*int)(p))
	case Int8:
		return int64(*(*int8)(p))
	case Int16:
		return int64(*(*int16)(p))
	case Int32:
		return int64(*(*int32)(p))
	case Int64:
		return *(*int64)(p)
	}
	panic(&ValueError{"reflect.Value.Int", v.kind()})
}

// CanInterface reports whether [Value.Interface] can be used without panicking.
func (v Value) CanInterface() bool {
	if v.flag == 0 {
		panic(&ValueError{"reflect.Value.CanInterface", Invalid})
	}
	return v.flag&flagRO == 0
}

// Interface returns v's current value as an interface{}.
// It is equivalent to:
//
//	var i interface{} = (v's underlying value)
//
// It panics if the Value was obtained by accessing
// unexported struct fields.
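//
// For example (a sketch from a client package):
//
//	v := reflect.ValueOf(3)
//	i := v.Interface() // i has dynamic type int and value 3
//	n := i.(int)       // n == 3
//	_ = n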
func (v Value) Interface() (i any) {
	return valueInterface(v, true)
}

func valueInterface(v Value, safe bool) any {
	if v.flag == 0 {
		panic(&ValueError{"reflect.Value.Interface", Invalid})
	}
	if safe && v.flag&flagRO != 0 {
		// Do not allow access to unexported values via Interface,
		// because they might be pointers that should not be
		// writable or methods or functions that should not be callable.
		panic("reflect.Value.Interface: cannot return value obtained from unexported field or method")
	}
	if v.flag&flagMethod != 0 {
		v = makeMethodValue("Interface", v)
	}

	if v.kind() == Interface {
		// Special case: return the element inside the interface.
		// Empty interface has one layout, all interfaces with
		// methods have a second layout.
		if v.NumMethod() == 0 {
			return *(*any)(v.ptr)
		}
		return *(*interface {
			M()
		})(v.ptr)
	}

	return packEface(v)
}

// InterfaceData returns a pair of unspecified uintptr values.
// It panics if v's Kind is not Interface.
//
// In earlier versions of Go, this function returned the interface's
// value as a uintptr pair. As of Go 1.4, the implementation of
// interface values precludes any defined use of InterfaceData.
//
// Deprecated: The memory representation of interface values is not
// compatible with InterfaceData.
func (v Value) InterfaceData() [2]uintptr {
	v.mustBe(Interface)
	// The compiler loses track as it converts to uintptr. Force escape.
	escapes(v.ptr)
	// We treat this as a read operation, so we allow
	// it even for unexported data, because the caller
	// has to import "unsafe" to turn it into something
	// that can be abused.
	// Interface value is always bigger than a word; assume flagIndir.
	return *(*[2]uintptr)(v.ptr)
}

// IsNil reports whether its argument v is nil. The argument must be
// a chan, func, interface, map, pointer, or slice value; if it is
// not, IsNil panics. Note that IsNil is not always equivalent to a
// regular comparison with nil in Go. For example, if v was created
// by calling [ValueOf] with an uninitialized interface variable i,
// i==nil will be true but v.IsNil will panic as v will be the zero
// Value.
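//
// A minimal sketch (from a client package):
//
//	var p *int
//	fmt.Println(reflect.ValueOf(p).IsNil()) // true: typed nil pointer
//	var i any                               // uninitialized interface variable
//	reflect.ValueOf(i).IsNil()              // panics: v is the zero Value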
func (v Value) IsNil() bool {
	k := v.kind()
	switch k {
	case Chan, Func, Map, Pointer, UnsafePointer:
		if v.flag&flagMethod != 0 {
			return false
		}
		ptr := v.ptr
		if v.flag&flagIndir != 0 {
			ptr = *(*unsafe.Pointer)(ptr)
		}
		return ptr == nil
	case Interface, Slice:
		// Both interface and slice are nil if first word is 0.
		// Both are always bigger than a word; assume flagIndir.
		return *(*unsafe.Pointer)(v.ptr) == nil
	}
	panic(&ValueError{"reflect.Value.IsNil", v.kind()})
}

// IsValid reports whether v represents a value.
// It returns false if v is the zero Value.
// If [Value.IsValid] returns false, all other methods except String panic.
// Most functions and methods never return an invalid Value.
// If one does, its documentation states the conditions explicitly.
func (v Value) IsValid() bool {
	return v.flag != 0
}

// IsZero reports whether v is the zero value for its type.
// It panics if the argument is invalid.
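//
// For example (a sketch from a client package, assuming a hypothetical
// struct type T with a field A):
//
//	fmt.Println(reflect.ValueOf(0).IsZero())       // true
//	fmt.Println(reflect.ValueOf("").IsZero())      // true
//	fmt.Println(reflect.ValueOf(T{}).IsZero())     // true
//	fmt.Println(reflect.ValueOf(T{A: 1}).IsZero()) // false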
func (v Value) IsZero() bool {
	switch v.kind() {
	case Bool:
		return !v.Bool()
	case Int, Int8, Int16, Int32, Int64:
		return v.Int() == 0
	case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
		return v.Uint() == 0
	case Float32, Float64:
		return v.Float() == 0
	case Complex64, Complex128:
		return v.Complex() == 0
	case Array:
		if v.flag&flagIndir == 0 {
			return v.ptr == nil
		}
		typ := (*abi.ArrayType)(unsafe.Pointer(v.typ()))
		// If the type is comparable, then compare directly with zero.
		if typ.Equal != nil && typ.Size() <= abi.ZeroValSize {
			// v.ptr doesn't escape, as Equal functions are compiler generated
			// and never escape. The escape analysis doesn't know, as it is a
			// function pointer call.
			return typ.Equal(abi.NoEscape(v.ptr), unsafe.Pointer(&zeroVal[0]))
		}
		if typ.TFlag&abi.TFlagRegularMemory != 0 {
			// For types whose zero value has all bits set to 0,
			// check the memory directly.
			return isZero(unsafe.Slice(((*byte)(v.ptr)), typ.Size()))
		}
		n := int(typ.Len)
		for i := 0; i < n; i++ {
			if !v.Index(i).IsZero() {
				return false
			}
		}
		return true
	case Chan, Func, Interface, Map, Pointer, Slice, UnsafePointer:
		return v.IsNil()
	case String:
		return v.Len() == 0
	case Struct:
		if v.flag&flagIndir == 0 {
			return v.ptr == nil
		}
		typ := (*abi.StructType)(unsafe.Pointer(v.typ()))
		// If the type is comparable, then compare directly with zero.
		if typ.Equal != nil && typ.Size() <= abi.ZeroValSize {
			// See noescape justification above.
			return typ.Equal(abi.NoEscape(v.ptr), unsafe.Pointer(&zeroVal[0]))
		}
		if typ.TFlag&abi.TFlagRegularMemory != 0 {
			// For types whose zero value has all bits set to 0,
			// check the memory directly.
			return isZero(unsafe.Slice(((*byte)(v.ptr)), typ.Size()))
		}

		n := v.NumField()
		for i := 0; i < n; i++ {
			if !v.Field(i).IsZero() && v.Type().Field(i).Name != "_" {
				return false
			}
		}
		return true
	default:
		// This should never happen, but will act as a safeguard for later,
		// as a default value doesn't make sense here.
		panic(&ValueError{"reflect.Value.IsZero", v.Kind()})
	}
}

// isZero reports whether b consists entirely of zero bytes.
// For all-zero inputs, performance is not as good as
// return bytealg.Count(b, byte(0)) == len(b).
func isZero(b []byte) bool {
	if len(b) == 0 {
		return true
	}
	const n = 32
	// Align memory addresses to 8 bytes.
	for uintptr(unsafe.Pointer(&b[0]))%8 != 0 {
		if b[0] != 0 {
			return false
		}
		b = b[1:]
		if len(b) == 0 {
			return true
		}
	}
	for len(b)%8 != 0 {
		if b[len(b)-1] != 0 {
			return false
		}
		b = b[:len(b)-1]
	}
	if len(b) == 0 {
		return true
	}
	w := unsafe.Slice((*uint64)(unsafe.Pointer(&b[0])), len(b)/8)
	for len(w)%n != 0 {
		if w[0] != 0 {
			return false
		}
		w = w[1:]
	}
	for len(w) >= n {
		if w[0] != 0 || w[1] != 0 || w[2] != 0 || w[3] != 0 ||
			w[4] != 0 || w[5] != 0 || w[6] != 0 || w[7] != 0 ||
			w[8] != 0 || w[9] != 0 || w[10] != 0 || w[11] != 0 ||
			w[12] != 0 || w[13] != 0 || w[14] != 0 || w[15] != 0 ||
			w[16] != 0 || w[17] != 0 || w[18] != 0 || w[19] != 0 ||
			w[20] != 0 || w[21] != 0 || w[22] != 0 || w[23] != 0 ||
			w[24] != 0 || w[25] != 0 || w[26] != 0 || w[27] != 0 ||
			w[28] != 0 || w[29] != 0 || w[30] != 0 || w[31] != 0 {
			return false
		}
		w = w[n:]
	}
	return true
}

// SetZero sets v to be the zero value of v's type.
// It panics if [Value.CanSet] returns false.
func (v Value) SetZero() {
	v.mustBeAssignable()
	switch v.kind() {
	case Bool:
		*(*bool)(v.ptr) = false
	case Int:
		*(*int)(v.ptr) = 0
	case Int8:
		*(*int8)(v.ptr) = 0
	case Int16:
		*(*int16)(v.ptr) = 0
	case Int32:
		*(*int32)(v.ptr) = 0
	case Int64:
		*(*int64)(v.ptr) = 0
	case Uint:
		*(*uint)(v.ptr) = 0
	case Uint8:
		*(*uint8)(v.ptr) = 0
	case Uint16:
		*(*uint16)(v.ptr) = 0
	case Uint32:
		*(*uint32)(v.ptr) = 0
	case Uint64:
		*(*uint64)(v.ptr) = 0
	case Uintptr:
		*(*uintptr)(v.ptr) = 0
	case Float32:
		*(*float32)(v.ptr) = 0
	case Float64:
		*(*float64)(v.ptr) = 0
	case Complex64:
		*(*complex64)(v.ptr) = 0
	case Complex128:
		*(*complex128)(v.ptr) = 0
	case String:
		*(*string)(v.ptr) = ""
	case Slice:
		*(*unsafeheader.Slice)(v.ptr) = unsafeheader.Slice{}
	case Interface:
		*(*abi.EmptyInterface)(v.ptr) = abi.EmptyInterface{}
	case Chan, Func, Map, Pointer, UnsafePointer:
		*(*unsafe.Pointer)(v.ptr) = nil
	case Array, Struct:
		typedmemclr(v.typ(), v.ptr)
	default:
		// This should never happen, but will act as a safeguard for later,
		// as a default value doesn't make sense here.
		panic(&ValueError{"reflect.Value.SetZero", v.Kind()})
	}
}

// Kind returns v's Kind.
// If v is the zero Value ([Value.IsValid] returns false), Kind returns Invalid.
func (v Value) Kind() Kind {
	return v.kind()
}

// Len returns v's length.
// It panics if v's Kind is not [Array], [Chan], [Map], [Slice], [String], or pointer to [Array].
func (v Value) Len() int {
	// lenNonSlice is split out to keep Len inlineable for slice kinds.
	if v.kind() == Slice {
		return (*unsafeheader.Slice)(v.ptr).Len
	}
	return v.lenNonSlice()
}

func (v Value) lenNonSlice() int {
	switch k := v.kind(); k {
	case Array:
		tt := (*arrayType)(unsafe.Pointer(v.typ()))
		return int(tt.Len)
	case Chan:
		return chanlen(v.pointer())
	case Map:
		return maplen(v.pointer())
	case String:
		// String is bigger than a word; assume flagIndir.
		return (*unsafeheader.String)(v.ptr).Len
	case Ptr:
		if v.typ().Elem().Kind() == abi.Array {
			return v.typ().Elem().Len()
		}
		panic("reflect: call of reflect.Value.Len on ptr to non-array Value")
	}
	panic(&ValueError{"reflect.Value.Len", v.kind()})
}

var stringType = rtypeOf("")

// MapIndex returns the value associated with key in the map v.
// It panics if v's Kind is not [Map].
// It returns the zero Value if key is not found in the map or if v represents a nil map.
// As in Go, the key's value must be assignable to the map's key type.
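//
// A minimal sketch (from a client package):
//
//	m := map[string]int{"a": 1}
//	v := reflect.ValueOf(m)
//	fmt.Println(v.MapIndex(reflect.ValueOf("a")).Int())     // 1
//	fmt.Println(v.MapIndex(reflect.ValueOf("b")).IsValid()) // false: key not present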
func (v Value) MapIndex(key Value) Value {
	v.mustBe(Map)
	tt := (*mapType)(unsafe.Pointer(v.typ()))

	// Do not require key to be exported, so that DeepEqual
	// and other programs can use all the keys returned by
	// MapKeys as arguments to MapIndex. If either the map
	// or the key is unexported, though, the result will be
	// considered unexported. This is consistent with the
	// behavior for structs, which allow read but not write
	// of unexported fields.

	var e unsafe.Pointer
	if (tt.Key == stringType || key.kind() == String) && tt.Key == key.typ() && tt.Elem.Size() <= abi.MapMaxElemBytes {
		k := *(*string)(key.ptr)
		e = mapaccess_faststr(v.typ(), v.pointer(), k)
	} else {
		key = key.assignTo("reflect.Value.MapIndex", tt.Key, nil)
		var k unsafe.Pointer
		if key.flag&flagIndir != 0 {
			k = key.ptr
		} else {
			k = unsafe.Pointer(&key.ptr)
		}
		e = mapaccess(v.typ(), v.pointer(), k)
	}
	if e == nil {
		return Value{}
	}
	typ := tt.Elem
	fl := (v.flag | key.flag).ro()
	fl |= flag(typ.Kind())
	return copyVal(typ, fl, e)
}

// MapKeys returns a slice containing all the keys present in the map,
// in unspecified order.
// It panics if v's Kind is not [Map].
// It returns an empty slice if v represents a nil map.
func (v Value) MapKeys() []Value {
	v.mustBe(Map)
	tt := (*mapType)(unsafe.Pointer(v.typ()))
	keyType := tt.Key

	fl := v.flag.ro() | flag(keyType.Kind())

	m := v.pointer()
	mlen := int(0)
	if m != nil {
		mlen = maplen(m)
	}
	var it hiter
	mapiterinit(v.typ(), m, &it)
	a := make([]Value, mlen)
	var i int
	for i = 0; i < len(a); i++ {
		key := mapiterkey(&it)
		if key == nil {
			// Someone deleted an entry from the map since we
			// called maplen above. It's a data race, but nothing
			// we can do about it.
			break
		}
		a[i] = copyVal(keyType, fl, key)
		mapiternext(&it)
	}
	return a[:i]
}

// hiter's structure matches runtime.hiter's structure.
// Having a clone here allows us to embed a map iterator
// inside type MapIter so that MapIters can be re-used
// without doing any allocations.
type hiter struct {
	key         unsafe.Pointer
	elem        unsafe.Pointer
	t           unsafe.Pointer
	h           unsafe.Pointer
	buckets     unsafe.Pointer
	bptr        unsafe.Pointer
	overflow    *[]unsafe.Pointer
	oldoverflow *[]unsafe.Pointer
	startBucket uintptr
	offset      uint8
	wrapped     bool
	B           uint8
	i           uint8
	bucket      uintptr
	checkBucket uintptr
}

func (h *hiter) initialized() bool {
	return h.t != nil
}

// A MapIter is an iterator for ranging over a map.
// See [Value.MapRange].
type MapIter struct {
	m     Value
	hiter hiter
}

// Key returns the key of iter's current map entry.
func (iter *MapIter) Key() Value {
	if !iter.hiter.initialized() {
		panic("MapIter.Key called before Next")
	}
	iterkey := mapiterkey(&iter.hiter)
	if iterkey == nil {
		panic("MapIter.Key called on exhausted iterator")
	}

	t := (*mapType)(unsafe.Pointer(iter.m.typ()))
	ktype := t.Key
	return copyVal(ktype, iter.m.flag.ro()|flag(ktype.Kind()), iterkey)
}

// SetIterKey assigns to v the key of iter's current map entry.
// It is equivalent to v.Set(iter.Key()), but it avoids allocating a new Value.
// As in Go, the key must be assignable to v's type and
// must not be derived from an unexported field.
func (v Value) SetIterKey(iter *MapIter) {
	if !iter.hiter.initialized() {
		panic("reflect: Value.SetIterKey called before Next")
	}
	iterkey := mapiterkey(&iter.hiter)
	if iterkey == nil {
		panic("reflect: Value.SetIterKey called on exhausted iterator")
	}

	v.mustBeAssignable()
	var target unsafe.Pointer
	if v.kind() == Interface {
		target = v.ptr
	}

	t := (*mapType)(unsafe.Pointer(iter.m.typ()))
	ktype := t.Key

	iter.m.mustBeExported() // do not let unexported m leak
	key := Value{ktype, iterkey, iter.m.flag | flag(ktype.Kind()) | flagIndir}
	key = key.assignTo("reflect.MapIter.SetKey", v.typ(), target)
	typedmemmove(v.typ(), v.ptr, key.ptr)
}

// Value returns the value of iter's current map entry.
func (iter *MapIter) Value() Value {
	if !iter.hiter.initialized() {
		panic("MapIter.Value called before Next")
	}
	iterelem := mapiterelem(&iter.hiter)
	if iterelem == nil {
		panic("MapIter.Value called on exhausted iterator")
	}

	t := (*mapType)(unsafe.Pointer(iter.m.typ()))
	vtype := t.Elem
	return copyVal(vtype, iter.m.flag.ro()|flag(vtype.Kind()), iterelem)
}

// SetIterValue assigns to v the value of iter's current map entry.
// It is equivalent to v.Set(iter.Value()), but it avoids allocating a new Value.
// As in Go, the value must be assignable to v's type and
// must not be derived from an unexported field.
func (v Value) SetIterValue(iter *MapIter) {
	if !iter.hiter.initialized() {
		panic("reflect: Value.SetIterValue called before Next")
	}
	iterelem := mapiterelem(&iter.hiter)
	if iterelem == nil {
		panic("reflect: Value.SetIterValue called on exhausted iterator")
	}

	v.mustBeAssignable()
	var target unsafe.Pointer
	if v.kind() == Interface {
		target = v.ptr
	}

	t := (*mapType)(unsafe.Pointer(iter.m.typ()))
	vtype := t.Elem

	iter.m.mustBeExported() // do not let unexported m leak
	elem := Value{vtype, iterelem, iter.m.flag | flag(vtype.Kind()) | flagIndir}
	elem = elem.assignTo("reflect.MapIter.SetValue", v.typ(), target)
	typedmemmove(v.typ(), v.ptr, elem.ptr)
}

// Next advances the map iterator and reports whether there is another
// entry. It returns false when iter is exhausted; subsequent
// calls to [MapIter.Key], [MapIter.Value], or [MapIter.Next] will panic.
func ( *MapIter) () bool {
	if !.m.IsValid() {
		panic("MapIter.Next called on an iterator that does not have an associated map Value")
	}
	if !.hiter.initialized() {
		mapiterinit(.m.typ(), .m.pointer(), &.hiter)
	} else {
		if mapiterkey(&.hiter) == nil {
			panic("MapIter.Next called on exhausted iterator")
		}
		mapiternext(&.hiter)
	}
	return mapiterkey(&.hiter) != nil
}

// Reset modifies iter to iterate over v.
// It panics if v's Kind is not [Map] and v is not the zero Value.
// Reset(Value{}) causes iter not to refer to any map,
// which may allow the previously iterated-over map to be garbage collected.
func ( *MapIter) ( Value) {
	if .IsValid() {
		.mustBe(Map)
	}
	.m = 
	.hiter = hiter{}
}

// MapRange returns a range iterator for a map.
// It panics if v's Kind is not [Map].
//
// Call [MapIter.Next] to advance the iterator, and [MapIter.Key]/[MapIter.Value] to access each entry.
// [MapIter.Next] returns false when the iterator is exhausted.
// MapRange follows the same iteration semantics as a range statement.
//
// Example:
//
//	iter := reflect.ValueOf(m).MapRange()
//	for iter.Next() {
//		k := iter.Key()
//		v := iter.Value()
//		...
//	}
func ( Value) () *MapIter {
	// This is inlinable to take advantage of "function outlining".
	// The allocation of MapIter can be stack allocated if the caller
	// does not allow it to escape.
	// See https://blog.filippo.io/efficient-go-apis-with-the-inliner/
	if .kind() != Map {
		.panicNotMap()
	}
	return &MapIter{m: }
}

// Force the slow panicking path not to be inlined, so that it
// won't add to the inlining budget of the caller.
// TODO: undo when the inliner is no longer bottom-up only.
//
//go:noinline
func ( flag) () {
	.mustBe(Map)
}

// copyVal returns a Value containing the map key or value at ptr,
// allocating a new variable as needed.
func copyVal( *abi.Type,  flag,  unsafe.Pointer) Value {
	if .IfaceIndir() {
		// Copy result so future changes to the map
		// won't change the underlying value.
		 := unsafe_New()
		typedmemmove(, , )
		return Value{, ,  | flagIndir}
	}
	return Value{, *(*unsafe.Pointer)(), }
}

// Method returns a function value corresponding to v's i'th method.
// The arguments to a Call on the returned function should not include
// a receiver; the returned function will always use v as the receiver.
// Method panics if i is out of range or if v is a nil interface value.
func ( Value) ( int) Value {
	if .typ() == nil {
		panic(&ValueError{"reflect.Value.Method", Invalid})
	}
	if .flag&flagMethod != 0 || uint() >= uint(toRType(.typ()).NumMethod()) {
		panic("reflect: Method index out of range")
	}
	if .typ().Kind() == abi.Interface && .IsNil() {
		panic("reflect: Method on nil interface value")
	}
	 := .flag.ro() | (.flag & flagIndir)
	 |= flag(Func)
	 |= flag()<<flagMethodShift | flagMethod
	return Value{.typ(), .ptr, }
}

// NumMethod returns the number of methods in the value's method set.
//
// For a non-interface type, it returns the number of exported methods.
//
// For an interface type, it returns the number of exported and unexported methods.
func ( Value) () int {
	if .typ() == nil {
		panic(&ValueError{"reflect.Value.NumMethod", Invalid})
	}
	if .flag&flagMethod != 0 {
		return 0
	}
	return toRType(.typ()).NumMethod()
}

// MethodByName returns a function value corresponding to the method
// of v with the given name.
// The arguments to a Call on the returned function should not include
// a receiver; the returned function will always use v as the receiver.
// It returns the zero Value if no method was found.
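//
// For example, an illustrative call of a method on a *bytes.Buffer by name:
//
//	var buf bytes.Buffer
//	m := reflect.ValueOf(&buf).MethodByName("WriteString")
//	if m.IsValid() {
//		m.Call([]reflect.Value{reflect.ValueOf("hello")})
//	}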
func ( Value) ( string) Value {
	if .typ() == nil {
		panic(&ValueError{"reflect.Value.MethodByName", Invalid})
	}
	if .flag&flagMethod != 0 {
		return Value{}
	}
	,  := toRType(.typ()).MethodByName()
	if ! {
		return Value{}
	}
	return .Method(.Index)
}

// NumField returns the number of fields in the struct v.
// It panics if v's Kind is not [Struct].
func ( Value) () int {
	.mustBe(Struct)
	 := (*structType)(unsafe.Pointer(.typ()))
	return len(.Fields)
}

// OverflowComplex reports whether the complex128 x cannot be represented by v's type.
// It panics if v's Kind is not [Complex64] or [Complex128].
func ( Value) ( complex128) bool {
	 := .kind()
	switch  {
	case Complex64:
		return overflowFloat32(real()) || overflowFloat32(imag())
	case Complex128:
		return false
	}
	panic(&ValueError{"reflect.Value.OverflowComplex", .kind()})
}

// OverflowFloat reports whether the float64 x cannot be represented by v's type.
// It panics if v's Kind is not [Float32] or [Float64].
func ( Value) ( float64) bool {
	 := .kind()
	switch  {
	case Float32:
		return overflowFloat32()
	case Float64:
		return false
	}
	panic(&ValueError{"reflect.Value.OverflowFloat", .kind()})
}

func overflowFloat32( float64) bool {
	if  < 0 {
		 = -
	}
	return math.MaxFloat32 <  &&  <= math.MaxFloat64
}

// OverflowInt reports whether the int64 x cannot be represented by v's type.
// It panics if v's Kind is not [Int], [Int8], [Int16], [Int32], or [Int64].
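//
// For example, checking before SetInt so that an out-of-range value is not
// silently truncated (the int8 target here is only an illustration):
//
//	v := reflect.New(reflect.TypeOf(int8(0))).Elem()
//	if !v.OverflowInt(300) {
//		v.SetInt(300)
//	}
//	// OverflowInt(300) is true for int8, so SetInt is skipped.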
func ( Value) ( int64) bool {
	 := .kind()
	switch  {
	case Int, Int8, Int16, Int32, Int64:
		 := .typ().Size() * 8
		 := ( << (64 - )) >> (64 - )
		return  != 
	}
	panic(&ValueError{"reflect.Value.OverflowInt", .kind()})
}

// OverflowUint reports whether the uint64 x cannot be represented by v's type.
// It panics if v's Kind is not [Uint], [Uintptr], [Uint8], [Uint16], [Uint32], or [Uint64].
func ( Value) ( uint64) bool {
	 := .kind()
	switch  {
	case Uint, Uintptr, Uint8, Uint16, Uint32, Uint64:
		 := .typ_.Size() * 8 // ok to use v.typ_ directly as Size doesn't escape
		 := ( << (64 - )) >> (64 - )
		return  != 
	}
	panic(&ValueError{"reflect.Value.OverflowUint", .kind()})
}

//go:nocheckptr
// This prevents inlining Value.Pointer when -d=checkptr is enabled,
// which ensures cmd/compile can recognize unsafe.Pointer(v.Pointer())
// and make an exception.

// Pointer returns v's value as a uintptr.
// It panics if v's Kind is not [Chan], [Func], [Map], [Pointer], [Slice], [String], or [UnsafePointer].
//
// If v's Kind is [Func], the returned pointer is an underlying
// code pointer, but not necessarily enough to identify a
// single function uniquely. The only guarantee is that the
// result is zero if and only if v is a nil func Value.
//
// If v's Kind is [Slice], the returned pointer is to the first
// element of the slice. If the slice is nil the returned value
// is 0.  If the slice is empty but non-nil the return value is non-zero.
//
// If v's Kind is [String], the returned pointer is to the first
// element of the underlying bytes of the string.
//
// It's preferred to use uintptr(Value.UnsafePointer()) to get the equivalent result.
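//
// For example, both forms below yield the address of the first element of
// a slice s (assumed to be non-nil):
//
//	v := reflect.ValueOf(s)
//	p1 := v.Pointer()
//	p2 := uintptr(v.UnsafePointer()) // preferred; p1 == p2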
func ( Value) () uintptr {
	// The compiler loses track as it converts to uintptr. Force escape.
	escapes(.ptr)

	 := .kind()
	switch  {
	case Pointer:
		if !.typ().Pointers() {
			 := *(*uintptr)(.ptr)
			// Since it is a not-in-heap pointer, all pointers to the heap are
			// forbidden! See comment in Value.Elem and issue #48399.
			if !verifyNotInHeapPtr() {
				panic("reflect: reflect.Value.Pointer on an invalid notinheap pointer")
			}
			return 
		}
		fallthrough
	case Chan, Map, UnsafePointer:
		return uintptr(.pointer())
	case Func:
		if .flag&flagMethod != 0 {
			// As the doc comment says, the returned pointer is an
			// underlying code pointer but not necessarily enough to
			// identify a single function uniquely. All method expressions
			// created via reflect have the same underlying code pointer,
			// so their Pointers are equal. The function used here must
			// match the one used in makeMethodValue.
			return methodValueCallCodePtr()
		}
		 := .pointer()
		// Non-nil func value points at data block.
		// First word of data block is actual code.
		if  != nil {
			 = *(*unsafe.Pointer)()
		}
		return uintptr()
	case Slice:
		return uintptr((*unsafeheader.Slice)(.ptr).Data)
	case String:
		return uintptr((*unsafeheader.String)(.ptr).Data)
	}
	panic(&ValueError{"reflect.Value.Pointer", .kind()})
}

// Recv receives and returns a value from the channel v.
// It panics if v's Kind is not [Chan].
// The receive blocks until a value is ready.
// The boolean value ok is true if the value x corresponds to a send
// on the channel, false if it is a zero value received because the channel is closed.
func ( Value) () ( Value,  bool) {
	.mustBe(Chan)
	.mustBeExported()
	return .recv(false)
}

// internal recv, possibly non-blocking (nb).
// v is known to be a channel.
func ( Value) ( bool) ( Value,  bool) {
	 := (*chanType)(unsafe.Pointer(.typ()))
	if ChanDir(.Dir)&RecvDir == 0 {
		panic("reflect: recv on send-only channel")
	}
	 := .Elem
	 = Value{, nil, flag(.Kind())}
	var  unsafe.Pointer
	if .IfaceIndir() {
		 = unsafe_New()
		.ptr = 
		.flag |= flagIndir
	} else {
		 = unsafe.Pointer(&.ptr)
	}
	,  := chanrecv(.pointer(), , )
	if ! {
		 = Value{}
	}
	return
}

// Send sends x on the channel v.
// It panics if v's kind is not [Chan] or if x's type is not the same type as v's element type.
// As in Go, x's value must be assignable to the channel's element type.
func ( Value) ( Value) {
	.mustBe(Chan)
	.mustBeExported()
	.send(, false)
}

// internal send, possibly non-blocking.
// v is known to be a channel.
func ( Value) ( Value,  bool) ( bool) {
	 := (*chanType)(unsafe.Pointer(.typ()))
	if ChanDir(.Dir)&SendDir == 0 {
		panic("reflect: send on recv-only channel")
	}
	.mustBeExported()
	 = .assignTo("reflect.Value.Send", .Elem, nil)
	var  unsafe.Pointer
	if .flag&flagIndir != 0 {
		 = .ptr
	} else {
		 = unsafe.Pointer(&.ptr)
	}
	return chansend(.pointer(), , )
}

// Set assigns x to the value v.
// It panics if [Value.CanSet] returns false.
// As in Go, x's value must be assignable to v's type and
// must not be derived from an unexported field.
func ( Value) ( Value) {
	.mustBeAssignable()
	.mustBeExported() // do not let unexported x leak
	var  unsafe.Pointer
	if .kind() == Interface {
		 = .ptr
	}
	 = .assignTo("reflect.Set", .typ(), )
	if .flag&flagIndir != 0 {
		if .ptr == unsafe.Pointer(&zeroVal[0]) {
			typedmemclr(.typ(), .ptr)
		} else {
			typedmemmove(.typ(), .ptr, .ptr)
		}
	} else {
		*(*unsafe.Pointer)(.ptr) = .ptr
	}
}

// SetBool sets v's underlying value.
// It panics if v's Kind is not [Bool] or if [Value.CanSet] returns false.
func ( Value) ( bool) {
	.mustBeAssignable()
	.mustBe(Bool)
	*(*bool)(.ptr) = 
}

// SetBytes sets v's underlying value.
// It panics if v's underlying value is not a slice of bytes.
func ( Value) ( []byte) {
	.mustBeAssignable()
	.mustBe(Slice)
	if toRType(.typ()).Elem().Kind() != Uint8 { // TODO add Elem method, fix mustBe(Slice) to return slice.
		panic("reflect.Value.SetBytes of non-byte slice")
	}
	*(*[]byte)(.ptr) = 
}

// setRunes sets v's underlying value.
// It panics if v's underlying value is not a slice of runes (int32s).
func ( Value) ( []rune) {
	.mustBeAssignable()
	.mustBe(Slice)
	if .typ().Elem().Kind() != abi.Int32 {
		panic("reflect.Value.setRunes of non-rune slice")
	}
	*(*[]rune)(.ptr) = 
}

// SetComplex sets v's underlying value to x.
// It panics if v's Kind is not [Complex64] or [Complex128], or if [Value.CanSet] returns false.
func ( Value) ( complex128) {
	.mustBeAssignable()
	switch  := .kind();  {
	default:
		panic(&ValueError{"reflect.Value.SetComplex", .kind()})
	case Complex64:
		*(*complex64)(.ptr) = complex64()
	case Complex128:
		*(*complex128)(.ptr) = 
	}
}

// SetFloat sets v's underlying value to x.
// It panics if v's Kind is not [Float32] or [Float64], or if [Value.CanSet] returns false.
func ( Value) ( float64) {
	.mustBeAssignable()
	switch  := .kind();  {
	default:
		panic(&ValueError{"reflect.Value.SetFloat", .kind()})
	case Float32:
		*(*float32)(.ptr) = float32()
	case Float64:
		*(*float64)(.ptr) = 
	}
}

// SetInt sets v's underlying value to x.
// It panics if v's Kind is not [Int], [Int8], [Int16], [Int32], or [Int64], or if [Value.CanSet] returns false.
func ( Value) ( int64) {
	.mustBeAssignable()
	switch  := .kind();  {
	default:
		panic(&ValueError{"reflect.Value.SetInt", .kind()})
	case Int:
		*(*int)(.ptr) = int()
	case Int8:
		*(*int8)(.ptr) = int8()
	case Int16:
		*(*int16)(.ptr) = int16()
	case Int32:
		*(*int32)(.ptr) = int32()
	case Int64:
		*(*int64)(.ptr) = 
	}
}

// SetLen sets v's length to n.
// It panics if v's Kind is not [Slice] or if n is negative or
// greater than the capacity of the slice.
func ( Value) ( int) {
	.mustBeAssignable()
	.mustBe(Slice)
	 := (*unsafeheader.Slice)(.ptr)
	if uint() > uint(.Cap) {
		panic("reflect: slice length out of range in SetLen")
	}
	.Len = 
}

// SetCap sets v's capacity to n.
// It panics if v's Kind is not [Slice] or if n is smaller than the length or
// greater than the capacity of the slice.
func ( Value) ( int) {
	.mustBeAssignable()
	.mustBe(Slice)
	 := (*unsafeheader.Slice)(.ptr)
	if  < .Len ||  > .Cap {
		panic("reflect: slice capacity out of range in SetCap")
	}
	.Cap = 
}

// SetMapIndex sets the element associated with key in the map v to elem.
// It panics if v's Kind is not [Map].
// If elem is the zero Value, SetMapIndex deletes the key from the map.
// Otherwise if v holds a nil map, SetMapIndex will panic.
// As in Go, key's value must be assignable to the map's key type,
// and elem's value must be assignable to the map's elem type.
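//
// For example, given m assumed to be a non-nil map[string]int:
//
//	v := reflect.ValueOf(m)
//	v.SetMapIndex(reflect.ValueOf("a"), reflect.ValueOf(1)) // m["a"] = 1
//	v.SetMapIndex(reflect.ValueOf("a"), reflect.Value{})    // delete(m, "a")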
func ( Value) (,  Value) {
	.mustBe(Map)
	.mustBeExported()
	.mustBeExported()
	 := (*mapType)(unsafe.Pointer(.typ()))

	if (.Key == stringType || .kind() == String) && .Key == .typ() && .Elem.Size() <= abi.MapMaxElemBytes {
		 := *(*string)(.ptr)
		if .typ() == nil {
			mapdelete_faststr(.typ(), .pointer(), )
			return
		}
		.mustBeExported()
		 = .assignTo("reflect.Value.SetMapIndex", .Elem, nil)
		var  unsafe.Pointer
		if .flag&flagIndir != 0 {
			 = .ptr
		} else {
			 = unsafe.Pointer(&.ptr)
		}
		mapassign_faststr(.typ(), .pointer(), , )
		return
	}

	 = .assignTo("reflect.Value.SetMapIndex", .Key, nil)
	var  unsafe.Pointer
	if .flag&flagIndir != 0 {
		 = .ptr
	} else {
		 = unsafe.Pointer(&.ptr)
	}
	if .typ() == nil {
		mapdelete(.typ(), .pointer(), )
		return
	}
	.mustBeExported()
	 = .assignTo("reflect.Value.SetMapIndex", .Elem, nil)
	var  unsafe.Pointer
	if .flag&flagIndir != 0 {
		 = .ptr
	} else {
		 = unsafe.Pointer(&.ptr)
	}
	mapassign(.typ(), .pointer(), , )
}

// SetUint sets v's underlying value to x.
// It panics if v's Kind is not [Uint], [Uintptr], [Uint8], [Uint16], [Uint32], or [Uint64], or if [Value.CanSet] returns false.
func ( Value) ( uint64) {
	.mustBeAssignable()
	switch  := .kind();  {
	default:
		panic(&ValueError{"reflect.Value.SetUint", .kind()})
	case Uint:
		*(*uint)(.ptr) = uint()
	case Uint8:
		*(*uint8)(.ptr) = uint8()
	case Uint16:
		*(*uint16)(.ptr) = uint16()
	case Uint32:
		*(*uint32)(.ptr) = uint32()
	case Uint64:
		*(*uint64)(.ptr) = 
	case Uintptr:
		*(*uintptr)(.ptr) = uintptr()
	}
}

// SetPointer sets the [unsafe.Pointer] value v to x.
// It panics if v's Kind is not [UnsafePointer].
func ( Value) ( unsafe.Pointer) {
	.mustBeAssignable()
	.mustBe(UnsafePointer)
	*(*unsafe.Pointer)(.ptr) = 
}

// SetString sets v's underlying value to x.
// It panics if v's Kind is not [String] or if [Value.CanSet] returns false.
func ( Value) ( string) {
	.mustBeAssignable()
	.mustBe(String)
	*(*string)(.ptr) = 
}

// Slice returns v[i:j].
// It panics if v's Kind is not [Array], [Slice] or [String], or if v is an unaddressable array,
// or if the indexes are out of bounds.
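//
// For example, slicing an addressable array:
//
//	a := [5]int{1, 2, 3, 4, 5}
//	v := reflect.ValueOf(&a).Elem() // Elem of a pointer is addressable
//	s := v.Slice(1, 3)              // corresponds to a[1:3]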
func ( Value) (,  int) Value {
	var (
		  int
		  *sliceType
		 unsafe.Pointer
	)
	switch  := .kind();  {
	default:
		panic(&ValueError{"reflect.Value.Slice", .kind()})

	case Array:
		if .flag&flagAddr == 0 {
			panic("reflect.Value.Slice: slice of unaddressable array")
		}
		 := (*arrayType)(unsafe.Pointer(.typ()))
		 = int(.Len)
		 = (*sliceType)(unsafe.Pointer(.Slice))
		 = .ptr

	case Slice:
		 = (*sliceType)(unsafe.Pointer(.typ()))
		 := (*unsafeheader.Slice)(.ptr)
		 = .Data
		 = .Cap

	case String:
		 := (*unsafeheader.String)(.ptr)
		if  < 0 ||  <  ||  > .Len {
			panic("reflect.Value.Slice: string slice index out of bounds")
		}
		var  unsafeheader.String
		if  < .Len {
			 = unsafeheader.String{Data: arrayAt(.Data, , 1, "i < s.Len"), Len:  - }
		}
		return Value{.typ(), unsafe.Pointer(&), .flag}
	}

	if  < 0 ||  <  ||  >  {
		panic("reflect.Value.Slice: slice index out of bounds")
	}

	// Declare slice so that gc can see the base pointer in it.
	var  []unsafe.Pointer

	// Reinterpret as *unsafeheader.Slice to edit.
	 := (*unsafeheader.Slice)(unsafe.Pointer(&))
	.Len =  - 
	.Cap =  - 
	if - > 0 {
		.Data = arrayAt(, , .Elem.Size(), "i < cap")
	} else {
		// do not advance pointer, to avoid pointing beyond end of slice
		.Data = 
	}

	 := .flag.ro() | flagIndir | flag(Slice)
	return Value{.Common(), unsafe.Pointer(&), }
}

// Slice3 is the 3-index form of the slice operation: it returns v[i:j:k].
// It panics if v's Kind is not [Array] or [Slice], or if v is an unaddressable array,
// or if the indexes are out of bounds.
func ( Value) (, ,  int) Value {
	var (
		  int
		  *sliceType
		 unsafe.Pointer
	)
	switch  := .kind();  {
	default:
		panic(&ValueError{"reflect.Value.Slice3", .kind()})

	case Array:
		if .flag&flagAddr == 0 {
			panic("reflect.Value.Slice3: slice of unaddressable array")
		}
		 := (*arrayType)(unsafe.Pointer(.typ()))
		 = int(.Len)
		 = (*sliceType)(unsafe.Pointer(.Slice))
		 = .ptr

	case Slice:
		 = (*sliceType)(unsafe.Pointer(.typ()))
		 := (*unsafeheader.Slice)(.ptr)
		 = .Data
		 = .Cap
	}

	if  < 0 ||  <  ||  <  ||  >  {
		panic("reflect.Value.Slice3: slice index out of bounds")
	}

	// Declare slice so that the garbage collector
	// can see the base pointer in it.
	var  []unsafe.Pointer

	// Reinterpret as *unsafeheader.Slice to edit.
	 := (*unsafeheader.Slice)(unsafe.Pointer(&))
	.Len =  - 
	.Cap =  - 
	if - > 0 {
		.Data = arrayAt(, , .Elem.Size(), "i < k <= cap")
	} else {
		// do not advance pointer, to avoid pointing beyond end of slice
		.Data = 
	}

	 := .flag.ro() | flagIndir | flag(Slice)
	return Value{.Common(), unsafe.Pointer(&), }
}

// String returns the string v's underlying value, as a string.
// String is a special case because of Go's String method convention.
// Unlike the other getters, it does not panic if v's Kind is not [String].
// Instead, it returns a string of the form "<T value>" where T is v's type.
// The fmt package treats Values specially. It does not call their String
// method implicitly but instead prints the concrete values they hold.
func ( Value) () string {
	// stringNonString is split out to keep String inlineable for string kinds.
	if .kind() == String {
		return *(*string)(.ptr)
	}
	return .stringNonString()
}

func ( Value) () string {
	if .kind() == Invalid {
		return "<invalid Value>"
	}
	// If you call String on a reflect.Value of other type, it's better to
	// print something than to panic. Useful in debugging.
	return "<" + .Type().String() + " Value>"
}

// TryRecv attempts to receive a value from the channel v but will not block.
// It panics if v's Kind is not [Chan].
// If the receive delivers a value, x is the transferred value and ok is true.
// If the receive cannot finish without blocking, x is the zero Value and ok is false.
// If the channel is closed, x is the zero value for the channel's element type and ok is false.
func ( Value) () ( Value,  bool) {
	.mustBe(Chan)
	.mustBeExported()
	return .recv(true)
}

// TrySend attempts to send x on the channel v but will not block.
// It panics if v's Kind is not [Chan].
// It reports whether the value was sent.
// As in Go, x's value must be assignable to the channel's element type.
func ( Value) ( Value) bool {
	.mustBe(Chan)
	.mustBeExported()
	return .send(, true)
}

// Type returns v's type.
func ( Value) () Type {
	if .flag != 0 && .flag&flagMethod == 0 {
		return (*rtype)(noescape(unsafe.Pointer(.typ_))) // inline of toRType(v.typ()), for own inlining in inline test
	}
	return .typeSlow()
}

func ( Value) () Type {
	if .flag == 0 {
		panic(&ValueError{"reflect.Value.Type", Invalid})
	}

	 := .typ()
	if .flag&flagMethod == 0 {
		return toRType(.typ())
	}

	// Method value.
	// v.typ describes the receiver, not the method type.
	 := int(.flag) >> flagMethodShift
	if .typ().Kind() == abi.Interface {
		// Method on interface.
		 := (*interfaceType)(unsafe.Pointer())
		if uint() >= uint(len(.Methods)) {
			panic("reflect: internal error: invalid method index")
		}
		 := &.Methods[]
		return toRType(typeOffFor(, .Typ))
	}
	// Method on concrete type.
	 := .ExportedMethods()
	if uint() >= uint(len()) {
		panic("reflect: internal error: invalid method index")
	}
	 := []
	return toRType(typeOffFor(, .Mtyp))
}

// CanUint reports whether [Value.Uint] can be used without panicking.
func ( Value) () bool {
	switch .kind() {
	case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
		return true
	default:
		return false
	}
}

// Uint returns v's underlying value, as a uint64.
// It panics if v's Kind is not [Uint], [Uintptr], [Uint8], [Uint16], [Uint32], or [Uint64].
func ( Value) () uint64 {
	 := .kind()
	 := .ptr
	switch  {
	case Uint:
		return uint64(*(*uint)())
	case Uint8:
		return uint64(*(*uint8)())
	case Uint16:
		return uint64(*(*uint16)())
	case Uint32:
		return uint64(*(*uint32)())
	case Uint64:
		return *(*uint64)()
	case Uintptr:
		return uint64(*(*uintptr)())
	}
	panic(&ValueError{"reflect.Value.Uint", .kind()})
}

//go:nocheckptr
// This prevents inlining Value.UnsafeAddr when -d=checkptr is enabled,
// which ensures cmd/compile can recognize unsafe.Pointer(v.UnsafeAddr())
// and make an exception.

// UnsafeAddr returns a pointer to v's data, as a uintptr.
// It panics if v is not addressable.
//
// It's preferred to use uintptr(Value.Addr().UnsafePointer()) to get the equivalent result.
func ( Value) () uintptr {
	if .typ() == nil {
		panic(&ValueError{"reflect.Value.UnsafeAddr", Invalid})
	}
	if .flag&flagAddr == 0 {
		panic("reflect.Value.UnsafeAddr of unaddressable value")
	}
	// The compiler loses track as it converts to uintptr. Force escape.
	escapes(.ptr)
	return uintptr(.ptr)
}

// UnsafePointer returns v's value as a [unsafe.Pointer].
// It panics if v's Kind is not [Chan], [Func], [Map], [Pointer], [Slice], [String] or [UnsafePointer].
//
// If v's Kind is [Func], the returned pointer is an underlying
// code pointer, but not necessarily enough to identify a
// single function uniquely. The only guarantee is that the
// result is zero if and only if v is a nil func Value.
//
// If v's Kind is [Slice], the returned pointer is to the first
// element of the slice. If the slice is nil the returned value
// is nil.  If the slice is empty but non-nil the return value is non-nil.
//
// If v's Kind is [String], the returned pointer is to the first
// element of the underlying bytes of the string.
func ( Value) () unsafe.Pointer {
	 := .kind()
	switch  {
	case Pointer:
		if !.typ().Pointers() {
			// Since it is a not-in-heap pointer, all pointers to the heap are
			// forbidden! See comment in Value.Elem and issue #48399.
			if !verifyNotInHeapPtr(*(*uintptr)(.ptr)) {
				panic("reflect: reflect.Value.UnsafePointer on an invalid notinheap pointer")
			}
			return *(*unsafe.Pointer)(.ptr)
		}
		fallthrough
	case Chan, Map, UnsafePointer:
		return .pointer()
	case Func:
		if .flag&flagMethod != 0 {
			// As the doc comment says, the returned pointer is an
			// underlying code pointer but not necessarily enough to
			// identify a single function uniquely. All method expressions
			// created via reflect have the same underlying code pointer,
			// so their Pointers are equal. The function used here must
			// match the one used in makeMethodValue.
			 := methodValueCallCodePtr()
			return *(*unsafe.Pointer)(unsafe.Pointer(&))
		}
		 := .pointer()
		// Non-nil func value points at data block.
		// First word of data block is actual code.
		if  != nil {
			 = *(*unsafe.Pointer)()
		}
		return 
	case Slice:
		return (*unsafeheader.Slice)(.ptr).Data
	case String:
		return (*unsafeheader.String)(.ptr).Data
	}
	panic(&ValueError{"reflect.Value.UnsafePointer", .kind()})
}

// StringHeader is the runtime representation of a string.
// It cannot be used safely or portably and its representation may
// change in a later release.
// Moreover, the Data field is not sufficient to guarantee the data
// it references will not be garbage collected, so programs must keep
// a separate, correctly typed pointer to the underlying data.
//
// Deprecated: Use unsafe.String or unsafe.StringData instead.
type StringHeader struct {
	Data uintptr
	Len  int
}

// SliceHeader is the runtime representation of a slice.
// It cannot be used safely or portably and its representation may
// change in a later release.
// Moreover, the Data field is not sufficient to guarantee the data
// it references will not be garbage collected, so programs must keep
// a separate, correctly typed pointer to the underlying data.
//
// Deprecated: Use unsafe.Slice or unsafe.SliceData instead.
type SliceHeader struct {
	Data uintptr
	Len  int
	Cap  int
}

func typesMustMatch( string, ,  Type) {
	if  !=  {
		panic( + ": " + .String() + " != " + .String())
	}
}

// arrayAt returns the i-th element of p,
// an array whose elements are eltSize bytes wide.
// The array pointed at by p must have at least i+1 elements:
// it is invalid (but impossible to check here) to pass i >= len,
// because then the result will point outside the array.
// whySafe must explain why i < len. (Passing "i < len" is fine;
// the benefit is to surface this assumption at the call site.)
func arrayAt( unsafe.Pointer,  int,  uintptr,  string) unsafe.Pointer {
	return add(, uintptr()*, "i < len")
}

// Grow increases the slice's capacity, if necessary, to guarantee space for
// another n elements. After Grow(n), at least n elements can be appended
// to the slice without another allocation.
//
// It panics if v's Kind is not [Slice] or if n is negative or too large to
// allocate the memory.
func ( Value) ( int) {
	.mustBeAssignable()
	.mustBe(Slice)
	.grow()
}

// grow is identical to Grow but does not check for assignability.
func ( Value) ( int) {
	 := (*unsafeheader.Slice)(.ptr)
	switch {
	case  < 0:
		panic("reflect.Value.Grow: negative len")
	case .Len+ < 0:
		panic("reflect.Value.Grow: slice overflow")
	case .Len+ > .Cap:
		 := .typ().Elem()
		* = growslice(, *, )
	}
}

// extendSlice extends a slice by n elements.
//
// Unlike Value.grow, which grows the slice in place without
// changing its length, extendSlice returns a new slice value
// with the length incremented by the specified number of elements.
func ( Value) ( int) Value {
	.mustBeExported()
	.mustBe(Slice)

	// Shallow copy the slice header to avoid mutating the source slice.
	 := *(*unsafeheader.Slice)(.ptr)
	 := &
	.ptr = unsafe.Pointer()
	.flag = flagIndir | flag(Slice) // equivalent flag to MakeSlice

	.grow() // fine to treat as assignable since we allocate a new slice header
	.Len += 
	return 
}

// Clear clears the contents of a map or zeros the contents of a slice.
//
// It panics if v's Kind is not [Map] or [Slice].
func ( Value) () {
	switch .Kind() {
	case Slice:
		 := *(*unsafeheader.Slice)(.ptr)
		 := (*sliceType)(unsafe.Pointer(.typ()))
		typedarrayclear(.Elem, .Data, .Len)
	case Map:
		mapclear(.typ(), .pointer())
	default:
		panic(&ValueError{"reflect.Value.Clear", .Kind()})
	}
}

// Append appends the values x to a slice s and returns the resulting slice.
// As in Go, each x's value must be assignable to the slice's element type.
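//
// For example:
//
//	s := reflect.ValueOf([]int{1, 2})
//	s = reflect.Append(s, reflect.ValueOf(3), reflect.ValueOf(4))
//	// s now represents []int{1, 2, 3, 4}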
func ( Value,  ...Value) Value {
	.mustBe(Slice)
	 := .Len()
	 = .extendSlice(len())
	for ,  := range  {
		.Index( + ).Set()
	}
	return 
}

// AppendSlice appends a slice t to a slice s and returns the resulting slice.
// The slices s and t must have the same element type.
func (,  Value) Value {
	.mustBe(Slice)
	.mustBe(Slice)
	typesMustMatch("reflect.AppendSlice", .Type().Elem(), .Type().Elem())
	 := .Len()
	 := .Len()
	 = .extendSlice()
	Copy(.Slice(, +), )
	return 
}

// Copy copies the contents of src into dst until either
// dst has been filled or src has been exhausted.
// It returns the number of elements copied.
// Dst and src each must have kind [Slice] or [Array], and
// dst and src must have the same element type.
//
// As a special case, src can have kind [String] if the element type of dst is kind [Uint8].
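//
// For example, using the string special case:
//
//	dst := make([]byte, 3)
//	n := reflect.Copy(reflect.ValueOf(dst), reflect.ValueOf("hello"))
//	// n == 3 and dst now holds []byte("hel")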
func (,  Value) int {
	 := .kind()
	if  != Array &&  != Slice {
		panic(&ValueError{"reflect.Copy", })
	}
	if  == Array {
		.mustBeAssignable()
	}
	.mustBeExported()

	 := .kind()
	var  bool
	if  != Array &&  != Slice {
		 =  == String && .typ().Elem().Kind() == abi.Uint8
		if ! {
			panic(&ValueError{"reflect.Copy", })
		}
	}
	.mustBeExported()

	 := .typ().Elem()
	if ! {
		 := .typ().Elem()
		typesMustMatch("reflect.Copy", toType(), toType())
	}

	var ,  unsafeheader.Slice
	if  == Array {
		.Data = .ptr
		.Len = .Len()
		.Cap = .Len
	} else {
		 = *(*unsafeheader.Slice)(.ptr)
	}
	if  == Array {
		.Data = .ptr
		.Len = .Len()
		.Cap = .Len
	} else if  == Slice {
		 = *(*unsafeheader.Slice)(.ptr)
	} else {
		 := *(*unsafeheader.String)(.ptr)
		.Data = .Data
		.Len = .Len
		.Cap = .Len
	}

	return typedslicecopy(.Common(), , )
}

// A runtimeSelect is a single case passed to rselect.
// This must match ../runtime/select.go:/runtimeSelect
type runtimeSelect struct {
	dir SelectDir      // SelectSend, SelectRecv or SelectDefault
	typ *rtype         // channel type
	ch  unsafe.Pointer // channel
	val unsafe.Pointer // ptr to data (SendDir) or ptr to receive buffer (RecvDir)
}

// rselect runs a select. It returns the index of the chosen case.
// If the case was a receive, val is filled in with the received value.
// The conventional OK bool indicates whether the receive corresponds
// to a sent value.
//
// rselect generally doesn't escape the runtimeSelect slice, except
// that for the send case the value to send needs to escape. We don't
// have a way to represent that in the function signature. So we handle
// that with a forced escape in function Select.
//
//go:noescape
func rselect([]runtimeSelect) ( int,  bool)

// A SelectDir describes the communication direction of a select case.
type SelectDir int

// NOTE: These values must match ../runtime/select.go:/selectDir.

const (
	_             SelectDir = iota
	SelectSend              // case Chan <- Send
	SelectRecv              // case <-Chan:
	SelectDefault           // default
)

// A SelectCase describes a single case in a select operation.
// The kind of case depends on Dir, the communication direction.
//
// If Dir is SelectDefault, the case represents a default case.
// Chan and Send must be zero Values.
//
// If Dir is SelectSend, the case represents a send operation.
// Normally Chan's underlying value must be a channel, and Send's underlying value must be
// assignable to the channel's element type. As a special case, if Chan is a zero Value,
// then the case is ignored, and the field Send will also be ignored and may be either zero
// or non-zero.
//
// If Dir is [SelectRecv], the case represents a receive operation.
// Normally Chan's underlying value must be a channel and Send must be a zero Value.
// If Chan is a zero Value, then the case is ignored, but Send must still be a zero Value.
// When a receive operation is selected, the received Value is returned by Select.
type SelectCase struct {
	Dir  SelectDir // direction of case
	Chan Value     // channel to use (for send or receive)
	Send Value     // value to send (for send)
}

// Select executes a select operation described by the list of cases.
// Like the Go select statement, it blocks until at least one of the cases
// can proceed, makes a uniform pseudo-random choice,
// and then executes that case. It returns the index of the chosen case
// and, if that case was a receive operation, the value received and a
// boolean indicating whether the value corresponds to a send on the channel
// (as opposed to a zero value received because the channel is closed).
// Select supports a maximum of 65536 cases.
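//
// For example, a receive case paired with a default case:
//
//	ch := make(chan int, 1)
//	ch <- 42
//	cases := []reflect.SelectCase{
//		{Dir: reflect.SelectRecv, Chan: reflect.ValueOf(ch)},
//		{Dir: reflect.SelectDefault},
//	}
//	chosen, recv, ok := reflect.Select(cases)
//	// chosen == 0, recv.Int() == 42, ok == true, since the receive is ready.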
func ( []SelectCase) ( int,  Value,  bool) {
	if len() > 65536 {
		panic("reflect.Select: too many cases (max 65536)")
	}
	// NOTE: Do not trust that caller is not modifying cases data underfoot.
	// The range is safe because the caller cannot modify our copy of the len
	// and each iteration makes its own copy of the value c.
	var  []runtimeSelect
	if len() > 4 {
		// Slice is heap allocated due to runtime dependent capacity.
		 = make([]runtimeSelect, len())
	} else {
		// Slice can be stack allocated due to constant capacity.
		 = make([]runtimeSelect, len(), 4)
	}

	 := false
	for ,  := range  {
		 := &[]
		.dir = .Dir
		switch .Dir {
		default:
			panic("reflect.Select: invalid Dir")

		case SelectDefault: // default
			if  {
				panic("reflect.Select: multiple default cases")
			}
			 = true
			if .Chan.IsValid() {
				panic("reflect.Select: default case has Chan value")
			}
			if .Send.IsValid() {
				panic("reflect.Select: default case has Send value")
			}

		case SelectSend:
			 := .Chan
			if !.IsValid() {
				break
			}
			.mustBe(Chan)
			.mustBeExported()
			 := (*chanType)(unsafe.Pointer(.typ()))
			if ChanDir(.Dir)&SendDir == 0 {
				panic("reflect.Select: SendDir case using recv-only channel")
			}
			.ch = .pointer()
			.typ = toRType(&.Type)
			 := .Send
			if !.IsValid() {
				panic("reflect.Select: SendDir case missing Send value")
			}
			.mustBeExported()
			 = .assignTo("reflect.Select", .Elem, nil)
			if .flag&flagIndir != 0 {
				.val = .ptr
			} else {
				.val = unsafe.Pointer(&.ptr)
			}
			// The value to send needs to escape. See the comment at rselect for
			// why we need forced escape.
			escapes(.val)

		case SelectRecv:
			if .Send.IsValid() {
				panic("reflect.Select: RecvDir case has Send value")
			}
			 := .Chan
			if !.IsValid() {
				break
			}
			.mustBe(Chan)
			.mustBeExported()
			 := (*chanType)(unsafe.Pointer(.typ()))
			if ChanDir(.Dir)&RecvDir == 0 {
				panic("reflect.Select: RecvDir case using send-only channel")
			}
			.ch = .pointer()
			.typ = toRType(&.Type)
			.val = unsafe_New(.Elem)
		}
	}

	,  = rselect()
	if [].dir == SelectRecv {
		 := (*chanType)(unsafe.Pointer([].typ))
		 := .Elem
		 := [].val
		 := flag(.Kind())
		if .IfaceIndir() {
			 = Value{, ,  | flagIndir}
		} else {
			 = Value{, *(*unsafe.Pointer)(), }
		}
	}
	return , , 
}

/*
 * constructors
 */

// implemented in package runtime

//go:noescape
func unsafe_New(*abi.Type) unsafe.Pointer

//go:noescape
func unsafe_NewArray(*abi.Type, int) unsafe.Pointer

// MakeSlice creates a new zero-initialized slice value
// for the specified slice type, length, and capacity.
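//
// For example:
//
//	s := reflect.MakeSlice(reflect.TypeOf([]int(nil)), 2, 4)
//	s.Index(0).SetInt(7) // elements of the new slice are settable
//	// s.Len() == 2, s.Cap() == 4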
func ( Type, ,  int) Value {
	if .Kind() != Slice {
		panic("reflect.MakeSlice of non-slice type")
	}
	if  < 0 {
		panic("reflect.MakeSlice: negative len")
	}
	if  < 0 {
		panic("reflect.MakeSlice: negative cap")
	}
	if  >  {
		panic("reflect.MakeSlice: len > cap")
	}

	 := unsafeheader.Slice{Data: unsafe_NewArray(&(.Elem().(*rtype).t), ), Len: , Cap: }
	return Value{&.(*rtype).t, unsafe.Pointer(&), flagIndir | flag(Slice)}
}

// SliceAt returns a [Value] representing a slice whose underlying
// data starts at p, with length and capacity equal to n.
//
// This is like [unsafe.Slice].
func ( Type,  unsafe.Pointer,  int) Value {
	unsafeslice(.common(), , )
	 := unsafeheader.Slice{Data: , Len: , Cap: }
	return Value{SliceOf().common(), unsafe.Pointer(&), flagIndir | flag(Slice)}
}

// MakeChan creates a new channel with the specified type and buffer size.
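//
// For example, a buffered channel created and used through reflection:
//
//	c := reflect.MakeChan(reflect.TypeOf((chan int)(nil)), 1)
//	c.Send(reflect.ValueOf(42)) // does not block: the buffer has room
//	x, ok := c.Recv()           // x.Int() == 42, ok == true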
func ( Type,  int) Value {
	if .Kind() != Chan {
		panic("reflect.MakeChan of non-chan type")
	}
	if  < 0 {
		panic("reflect.MakeChan: negative buffer size")
	}
	if .ChanDir() != BothDir {
		panic("reflect.MakeChan: unidirectional channel type")
	}
	 := .common()
	 := makechan(, )
	return Value{, , flag(Chan)}
}

// MakeMap creates a new map with the specified type.
func ( Type) Value {
	return MakeMapWithSize(, 0)
}

// MakeMapWithSize creates a new map with the specified type
// and initial space for approximately n elements.
func ( Type,  int) Value {
	if .Kind() != Map {
		panic("reflect.MakeMapWithSize of non-map type")
	}
	 := .common()
	 := makemap(, )
	return Value{, , flag(Map)}
}

// Indirect returns the value that v points to.
// If v is a nil pointer, Indirect returns a zero Value.
// If v is not a pointer, Indirect returns v.
func ( Value) Value {
	if .Kind() != Pointer {
		return 
	}
	return .Elem()
}

// ValueOf returns a new Value initialized to the concrete value
// stored in the interface i. ValueOf(nil) returns the zero Value.
func ( any) Value {
	if  == nil {
		return Value{}
	}
	return unpackEface()
}

// Zero returns a Value representing the zero value for the specified type.
// The result is different from the zero value of the Value struct,
// which represents no value at all.
// For example, Zero(TypeOf(42)) returns a Value with Kind [Int] and value 0.
// The returned value is neither addressable nor settable.
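//
// For example:
//
//	z := reflect.Zero(reflect.TypeOf(42))
//	// z.Kind() == reflect.Int, z.Int() == 0, z.CanSet() == false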
func ( Type) Value {
	if  == nil {
		panic("reflect: Zero(nil)")
	}
	 := &.(*rtype).t
	 := flag(.Kind())
	if .IfaceIndir() {
		var  unsafe.Pointer
		if .Size() <= abi.ZeroValSize {
			 = unsafe.Pointer(&zeroVal[0])
		} else {
			 = unsafe_New()
		}
		return Value{, ,  | flagIndir}
	}
	return Value{, nil, }
}

//go:linkname zeroVal runtime.zeroVal
var zeroVal [abi.ZeroValSize]byte

// New returns a Value representing a pointer to a new zero value
// for the specified type. That is, the returned Value's Type is [PointerTo](typ).
func ( Type) Value {
	if  == nil {
		panic("reflect: New(nil)")
	}
	 := &.(*rtype).t
	 := ptrTo()
	if .IfaceIndir() {
		// This is a pointer to a not-in-heap type.
		panic("reflect: New of type that may not be allocated in heap (possibly undefined cgo C type)")
	}
	 := unsafe_New()
	 := flag(Pointer)
	return Value{, , }
}

// NewAt returns a Value representing a pointer to a value of the
// specified type, using p as that pointer.
func ( Type,  unsafe.Pointer) Value {
	 := flag(Pointer)
	 := .(*rtype)
	return Value{.ptrTo(), , }
}

// assignTo returns a value v that can be assigned directly to dst.
// It panics if v is not assignable to dst.
// For a conversion to an interface type, target, if not nil,
// is a suggested scratch space to use.
// target must be initialized memory (or nil).
func ( Value) ( string,  *abi.Type,  unsafe.Pointer) Value {
	if .flag&flagMethod != 0 {
		 = makeMethodValue(, )
	}

	switch {
	case directlyAssignable(, .typ()):
		// Overwrite type so that they match.
		// Same memory layout, so no harm done.
		 := .flag&(flagAddr|flagIndir) | .flag.ro()
		 |= flag(.Kind())
		return Value{, .ptr, }

	case implements(, .typ()):
		if .Kind() == Interface && .IsNil() {
			// A nil ReadWriter passed to nil Reader is OK,
			// but using ifaceE2I below will panic.
			// Avoid the panic by returning a nil dst (e.g., Reader) explicitly.
			return Value{, nil, flag(Interface)}
		}
		 := valueInterface(, false)
		if  == nil {
			 = unsafe_New()
		}
		if .NumMethod() == 0 {
			*(*any)() = 
		} else {
			ifaceE2I(, , )
		}
		return Value{, , flagIndir | flag(Interface)}
	}

	// Failed.
	panic( + ": value of type " + stringFor(.typ()) + " is not assignable to type " + stringFor())
}

// Convert returns the value v converted to type t.
// If the usual Go conversion rules do not allow conversion
// of the value v to type t, or if converting v to type t panics, Convert panics.
func ( Value) ( Type) Value {
	if .flag&flagMethod != 0 {
		 = makeMethodValue("Convert", )
	}
	 := convertOp(.common(), .typ())
	if  == nil {
		panic("reflect.Value.Convert: value of type " + stringFor(.typ()) + " cannot be converted to type " + .String())
	}
	return (, )
}

// CanConvert reports whether the value v can be converted to type t.
// If v.CanConvert(t) returns true then v.Convert(t) will not panic.
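//
// For example, a slice-to-array conversion that would panic at run time
// is rejected here:
//
//	v := reflect.ValueOf([]int{1, 2})
//	ok := v.CanConvert(reflect.TypeOf([4]int{})) // false: slice length 2 < array length 4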
func ( Value) ( Type) bool {
	 := .Type()
	if !.ConvertibleTo() {
		return false
	}
	// Converting from slice to array or to pointer-to-array can panic
	// depending on the value.
	switch {
	case .Kind() == Slice && .Kind() == Array:
		if .Len() > .Len() {
			return false
		}
	case .Kind() == Slice && .Kind() == Pointer && .Elem().Kind() == Array:
		 := .Elem().Len()
		if  > .Len() {
			return false
		}
	}
	return true
}

// Comparable reports whether the value v is comparable.
// If the type of v is an interface, this checks the dynamic type.
// If this reports true then v.Interface() == x will not panic for any x,
// nor will v.Equal(u) for any Value u.
func ( Value) () bool {
	 := .Kind()
	switch  {
	case Invalid:
		return false

	case Array:
		switch .Type().Elem().Kind() {
		case Interface, Array, Struct:
			for  := 0;  < .Type().Len(); ++ {
				if !.Index().() {
					return false
				}
			}
			return true
		}
		return .Type().Comparable()

	case Interface:
		return .IsNil() || .Elem().()

	case Struct:
		for  := 0;  < .NumField(); ++ {
			if !.Field().() {
				return false
			}
		}
		return true

	default:
		return .Type().Comparable()
	}
}

// Equal reports true if v is equal to u.
// For two invalid values, Equal will report true.
// For an interface value, Equal will compare the value within the interface.
// Otherwise, if the values have different types, Equal will report false.
// Otherwise, for arrays and structs Equal will compare each element in order,
// and report false if it finds non-equal elements.
// During all comparisons, if values of the same type are compared,
// and the type is not comparable, Equal will panic.
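//
// For example, arrays of comparable elements are compared element-wise:
//
//	a := reflect.ValueOf([2]int{1, 2})
//	b := reflect.ValueOf([2]int{1, 2})
//	eq := a.Equal(b) // true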
func ( Value) ( Value) bool {
	if .Kind() == Interface {
		 = .Elem()
	}
	if .Kind() == Interface {
		 = .Elem()
	}

	if !.IsValid() || !.IsValid() {
		return .IsValid() == .IsValid()
	}

	if .Kind() != .Kind() || .Type() != .Type() {
		return false
	}

	// Handle each Kind directly rather than calling valueInterface
	// to avoid allocating.
	switch .Kind() {
	default:
		panic("reflect.Value.Equal: invalid Kind")
	case Bool:
		return .Bool() == .Bool()
	case Int, Int8, Int16, Int32, Int64:
		return .Int() == .Int()
	case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
		return .Uint() == .Uint()
	case Float32, Float64:
		return .Float() == .Float()
	case Complex64, Complex128:
		return .Complex() == .Complex()
	case String:
		return .String() == .String()
	case Chan, Pointer, UnsafePointer:
		return .Pointer() == .Pointer()
	case Array:
		// u and v have the same type so they have the same length
		 := .Len()
		if  == 0 {
			// panic on [0]func()
			if !.Type().Elem().Comparable() {
				break
			}
			return true
		}
		for  := 0;  < ; ++ {
			if !.Index().(.Index()) {
				return false
			}
		}
		return true
	case Struct:
		// u and v have the same type so they have the same fields
		 := .NumField()
		for  := 0;  < ; ++ {
			if !.Field().(.Field()) {
				return false
			}
		}
		return true
	case Func, Map, Slice:
		break
	}
	panic("reflect.Value.Equal: values of type " + .Type().String() + " are not comparable")
}

// convertOp returns the function to convert a value of type src
// to a value of type dst. If the conversion is illegal, convertOp returns nil.
func convertOp(,  *abi.Type) func(Value, Type) Value {
	switch Kind(.Kind()) {
	case Int, Int8, Int16, Int32, Int64:
		switch Kind(.Kind()) {
		case Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
			return cvtInt
		case Float32, Float64:
			return cvtIntFloat
		case String:
			return cvtIntString
		}

	case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
		switch Kind(.Kind()) {
		case Int, Int8, Int16, Int32, Int64, Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
			return cvtUint
		case Float32, Float64:
			return cvtUintFloat
		case String:
			return cvtUintString
		}

	case Float32, Float64:
		switch Kind(.Kind()) {
		case Int, Int8, Int16, Int32, Int64:
			return cvtFloatInt
		case Uint, Uint8, Uint16, Uint32, Uint64, Uintptr:
			return cvtFloatUint
		case Float32, Float64:
			return cvtFloat
		}

	case Complex64, Complex128:
		switch Kind(.Kind()) {
		case Complex64, Complex128:
			return cvtComplex
		}

	case String:
		if .Kind() == abi.Slice && pkgPathFor(.Elem()) == "" {
			switch Kind(.Elem().Kind()) {
			case Uint8:
				return cvtStringBytes
			case Int32:
				return cvtStringRunes
			}
		}

	case Slice:
		if .Kind() == abi.String && pkgPathFor(.Elem()) == "" {
			switch Kind(.Elem().Kind()) {
			case Uint8:
				return cvtBytesString
			case Int32:
				return cvtRunesString
			}
		}
		// "x is a slice, T is a pointer-to-array type,
		// and the slice and array types have identical element types."
		if .Kind() == abi.Pointer && .Elem().Kind() == abi.Array && .Elem() == .Elem().Elem() {
			return cvtSliceArrayPtr
		}
		// "x is a slice, T is an array type,
		// and the slice and array types have identical element types."
		if .Kind() == abi.Array && .Elem() == .Elem() {
			return cvtSliceArray
		}

	case Chan:
		if .Kind() == abi.Chan && specialChannelAssignability(, ) {
			return cvtDirect
		}
	}

	// dst and src have same underlying type.
	if haveIdenticalUnderlyingType(, , false) {
		return cvtDirect
	}

// dst and src are non-defined pointer types with the same underlying base type.
	if .Kind() == abi.Pointer && nameFor() == "" &&
		.Kind() == abi.Pointer && nameFor() == "" &&
		haveIdenticalUnderlyingType(elem(), elem(), false) {
		return cvtDirect
	}

	if implements(, ) {
		if .Kind() == abi.Interface {
			return cvtI2I
		}
		return cvtT2I
	}

	return nil
}

// makeInt returns a Value of type t equal to bits (possibly truncated),
// where t is a signed or unsigned int type.
func makeInt( flag,  uint64,  Type) Value {
	 := .common()
	 := unsafe_New()
	switch .Size() {
	case 1:
		*(*uint8)() = uint8()
	case 2:
		*(*uint16)() = uint16()
	case 4:
		*(*uint32)() = uint32()
	case 8:
		*(*uint64)() = 
	}
	return Value{, ,  | flagIndir | flag(.Kind())}
}

// makeFloat returns a Value of type t equal to v (possibly truncated to float32),
// where t is a float32 or float64 type.
func makeFloat( flag,  float64,  Type) Value {
	 := .common()
	 := unsafe_New()
	switch .Size() {
	case 4:
		*(*float32)() = float32()
	case 8:
		*(*float64)() = 
	}
	return Value{, ,  | flagIndir | flag(.Kind())}
}

// makeFloat32 returns a Value of type t equal to v, where t is a float32 type.
func makeFloat32( flag,  float32,  Type) Value {
	 := .common()
	 := unsafe_New()
	*(*float32)() = 
	return Value{, ,  | flagIndir | flag(.Kind())}
}

// makeComplex returns a Value of type t equal to v (possibly truncated to complex64),
// where t is a complex64 or complex128 type.
func makeComplex( flag,  complex128,  Type) Value {
	 := .common()
	 := unsafe_New()
	switch .Size() {
	case 8:
		*(*complex64)() = complex64()
	case 16:
		*(*complex128)() = 
	}
	return Value{, ,  | flagIndir | flag(.Kind())}
}

func makeString( flag,  string,  Type) Value {
	 := New().Elem()
	.SetString()
	.flag = .flag&^flagAddr | 
	return 
}

func makeBytes( flag,  []byte,  Type) Value {
	 := New().Elem()
	.SetBytes()
	.flag = .flag&^flagAddr | 
	return 
}

func makeRunes( flag,  []rune,  Type) Value {
	 := New().Elem()
	.setRunes()
	.flag = .flag&^flagAddr | 
	return 
}

// These conversion functions are returned by convertOp
// for classes of conversions. For example, the first function, cvtInt,
// takes any value v of signed int type and returns the value converted
// to type t, where t is any signed or unsigned int type.

// convertOp: intXX -> [u]intXX
func cvtInt( Value,  Type) Value {
	return makeInt(.flag.ro(), uint64(.Int()), )
}

// convertOp: uintXX -> [u]intXX
func cvtUint( Value,  Type) Value {
	return makeInt(.flag.ro(), .Uint(), )
}

// convertOp: floatXX -> intXX
func cvtFloatInt( Value,  Type) Value {
	return makeInt(.flag.ro(), uint64(int64(.Float())), )
}

// convertOp: floatXX -> uintXX
func cvtFloatUint( Value,  Type) Value {
	return makeInt(.flag.ro(), uint64(.Float()), )
}

// convertOp: intXX -> floatXX
func cvtIntFloat( Value,  Type) Value {
	return makeFloat(.flag.ro(), float64(.Int()), )
}

// convertOp: uintXX -> floatXX
func cvtUintFloat( Value,  Type) Value {
	return makeFloat(.flag.ro(), float64(.Uint()), )
}

// convertOp: floatXX -> floatXX
func cvtFloat( Value,  Type) Value {
	if .Type().Kind() == Float32 && .Kind() == Float32 {
		// Don't do any conversion if both types have underlying type float32.
		// This avoids converting to float64 and back, which will
		// convert a signaling NaN to a quiet NaN. See issue 36400.
		return makeFloat32(.flag.ro(), *(*float32)(.ptr), )
	}
	return makeFloat(.flag.ro(), .Float(), )
}

// convertOp: complexXX -> complexXX
func cvtComplex( Value,  Type) Value {
	return makeComplex(.flag.ro(), .Complex(), )
}

// convertOp: intXX -> string
func cvtIntString( Value,  Type) Value {
	 := "\uFFFD"
	if  := .Int(); int64(rune()) ==  {
		 = string(rune())
	}
	return makeString(.flag.ro(), , )
}

// convertOp: uintXX -> string
func cvtUintString( Value,  Type) Value {
	 := "\uFFFD"
	if  := .Uint(); uint64(rune()) ==  {
		 = string(rune())
	}
	return makeString(.flag.ro(), , )
}

// convertOp: []byte -> string
func cvtBytesString( Value,  Type) Value {
	return makeString(.flag.ro(), string(.Bytes()), )
}

// convertOp: string -> []byte
func cvtStringBytes( Value,  Type) Value {
	return makeBytes(.flag.ro(), []byte(.String()), )
}

// convertOp: []rune -> string
func cvtRunesString( Value,  Type) Value {
	return makeString(.flag.ro(), string(.runes()), )
}

// convertOp: string -> []rune
func cvtStringRunes( Value,  Type) Value {
	return makeRunes(.flag.ro(), []rune(.String()), )
}

// convertOp: []T -> *[N]T
func cvtSliceArrayPtr( Value,  Type) Value {
	 := .Elem().Len()
	if  > .Len() {
		panic("reflect: cannot convert slice with length " + itoa.Itoa(.Len()) + " to pointer to array with length " + itoa.Itoa())
	}
	 := (*unsafeheader.Slice)(.ptr)
	return Value{.common(), .Data, .flag&^(flagIndir|flagAddr|flagKindMask) | flag(Pointer)}
}

// convertOp: []T -> [N]T
func cvtSliceArray( Value,  Type) Value {
	 := .Len()
	if  > .Len() {
		panic("reflect: cannot convert slice with length " + itoa.Itoa(.Len()) + " to array with length " + itoa.Itoa())
	}
	 := (*unsafeheader.Slice)(.ptr)
	 := .common()
	 := .Data
	 := unsafe_New()
	typedmemmove(, , )
	 = 

	return Value{, , .flag&^(flagAddr|flagKindMask) | flag(Array)}
}

// convertOp: direct copy
func cvtDirect( Value,  Type) Value {
	 := .flag
	 := .common()
	 := .ptr
	if &flagAddr != 0 {
		// indirect, mutable word - make a copy
		 := unsafe_New()
		typedmemmove(, , )
		 = 
		 &^= flagAddr
	}
	return Value{, , .flag.ro() | } // v.flag.ro()|f == f?
}

// convertOp: concrete -> interface
func cvtT2I( Value,  Type) Value {
	 := unsafe_New(.common())
	 := valueInterface(, false)
	if .NumMethod() == 0 {
		*(*any)() = 
	} else {
		ifaceE2I(.common(), , )
	}
	return Value{.common(), , .flag.ro() | flagIndir | flag(Interface)}
}

// convertOp: interface -> interface
func cvtI2I( Value,  Type) Value {
	if .IsNil() {
		 := Zero()
		.flag |= .flag.ro()
		return 
	}
	return cvtT2I(.Elem(), )
}

// implemented in ../runtime
//
//go:noescape
func chancap( unsafe.Pointer) int

//go:noescape
func chanclose( unsafe.Pointer)

//go:noescape
func chanlen( unsafe.Pointer) int

// Note: some of the noescape annotations below are technically a lie,
// but safe in the context of this package. Functions like chansend0
// and mapassign0 don't escape the referent, but may escape anything
// the referent points to (they do shallow copies of the referent).
// We add a 0 to their names and wrap them in functions with the
// proper escape behavior.

//go:noescape
func chanrecv( unsafe.Pointer,  bool,  unsafe.Pointer) (,  bool)

//go:noescape
func chansend0( unsafe.Pointer,  unsafe.Pointer,  bool) bool

func chansend( unsafe.Pointer,  unsafe.Pointer,  bool) bool {
	contentEscapes()
	return chansend0(, , )
}

func makechan( *abi.Type,  int) ( unsafe.Pointer)
func makemap( *abi.Type,  int) ( unsafe.Pointer)

//go:noescape
func mapaccess( *abi.Type,  unsafe.Pointer,  unsafe.Pointer) ( unsafe.Pointer)

//go:noescape
func mapaccess_faststr( *abi.Type,  unsafe.Pointer,  string) ( unsafe.Pointer)

//go:noescape
func mapassign0( *abi.Type,  unsafe.Pointer, ,  unsafe.Pointer)

// mapassign should be an internal detail,
// but widely used packages access it using linkname.
// Notable members of the hall of shame include:
//   - github.com/modern-go/reflect2
//   - github.com/goccy/go-json
//
// Do not remove or change the type signature.
// See go.dev/issue/67401.
//
//go:linkname mapassign
func mapassign( *abi.Type,  unsafe.Pointer, ,  unsafe.Pointer) {
	contentEscapes()
	contentEscapes()
	mapassign0(, , , )
}

//go:noescape
func mapassign_faststr0( *abi.Type,  unsafe.Pointer,  string,  unsafe.Pointer)

func mapassign_faststr( *abi.Type,  unsafe.Pointer,  string,  unsafe.Pointer) {
	contentEscapes((*unsafeheader.String)(unsafe.Pointer(&)).Data)
	contentEscapes()
	mapassign_faststr0(, , , )
}

//go:noescape
func mapdelete( *abi.Type,  unsafe.Pointer,  unsafe.Pointer)

//go:noescape
func mapdelete_faststr( *abi.Type,  unsafe.Pointer,  string)

//go:noescape
func mapiterinit( *abi.Type,  unsafe.Pointer,  *hiter)

//go:noescape
func mapiterkey( *hiter) ( unsafe.Pointer)

//go:noescape
func mapiterelem( *hiter) ( unsafe.Pointer)

//go:noescape
func mapiternext( *hiter)

//go:noescape
func maplen( unsafe.Pointer) int

func mapclear( *abi.Type,  unsafe.Pointer)

// call calls fn with "stackArgsSize" bytes of stack arguments laid out
// at stackArgs and register arguments laid out in regArgs. frameSize is
// the total amount of stack space that will be reserved by call, so this
// should include enough space to spill register arguments to the stack in
// case of preemption.
//
// After fn returns, call copies stackArgsSize-stackRetOffset result bytes
// back into stackArgs+stackRetOffset before returning, for any return
// values passed on the stack. Register-based return values will be found
// in the same regArgs structure.
//
// regArgs must also be prepared with an appropriate ReturnIsPtr bitmap
// indicating which registers will contain pointer-valued return values. The
// purpose of this bitmap is to keep pointers visible to the GC between
// returning from reflectcall and actually using them.
//
// If copying result bytes back from the stack, the caller must pass the
// argument frame type as stackArgsType, so that call can execute appropriate
// write barriers during the copy.
//
// Arguments passed through to call do not escape. The type is used only in a
// very limited callee of call, the stackArgs are copied, and regArgs is only
// used in the call frame.
//
//go:noescape
//go:linkname call runtime.reflectcall
func call( *abi.Type, ,  unsafe.Pointer, , ,  uint32,  *abi.RegArgs)

func ifaceE2I( *abi.Type,  any,  unsafe.Pointer)

// memmove copies size bytes to dst from src. No write barriers are used.
//
//go:noescape
func memmove(,  unsafe.Pointer,  uintptr)

// typedmemmove copies a value of type t to dst from src.
//
//go:noescape
func typedmemmove( *abi.Type, ,  unsafe.Pointer)

// typedmemclr zeros the value at ptr of type t.
//
//go:noescape
func typedmemclr( *abi.Type,  unsafe.Pointer)

// typedmemclrpartial is like typedmemclr but assumes that
// dst points off bytes into the value and only clears size bytes.
//
//go:noescape
func typedmemclrpartial( *abi.Type,  unsafe.Pointer, ,  uintptr)

// typedslicecopy copies a slice of elemType values from src to dst,
// returning the number of elements copied.
//
//go:noescape
func typedslicecopy( *abi.Type, ,  unsafeheader.Slice) int

// typedarrayclear zeroes the value at ptr of an array of elemType,
// only clears len elem.
//
//go:noescape
func typedarrayclear( *abi.Type,  unsafe.Pointer,  int)

//go:noescape
func typehash( *abi.Type,  unsafe.Pointer,  uintptr) uintptr

func verifyNotInHeapPtr( uintptr) bool

//go:noescape
func growslice( *abi.Type,  unsafeheader.Slice,  int) unsafeheader.Slice

//go:noescape
func unsafeslice( *abi.Type,  unsafe.Pointer,  int)

// Dummy annotation marking that the value x escapes,
// for use in cases where the reflect code is so clever that
// the compiler cannot follow.
func escapes( any) {
	if dummy.b {
		dummy.x = 
	}
}

var dummy struct {
	b bool
	x any
}

// Dummy annotation marking that the content of value x
// escapes (i.e. modeling roughly heap=*x),
// for use in cases where the reflect code is so clever that
// the compiler cannot follow.
func contentEscapes( unsafe.Pointer) {
	if dummy.b {
		escapes(*(*any)()) // the dereference may not always be safe, but never executed
	}
}

// This is just a wrapper around abi.NoEscape. The inlining heuristics are
// finicky and for whatever reason treat the local call to noescape as much
// lower cost with respect to the inliner budget. (That is, replacing calls to
// noescape with abi.NoEscape will cause inlining tests to fail.)
//
//go:nosplit
func noescape( unsafe.Pointer) unsafe.Pointer {
	return abi.NoEscape()
}