// Code generated by mkmalloc.go; DO NOT EDIT.
// See overview in malloc_stubs.go.

package runtime

import (
	"internal/goarch"
	"internal/goexperiment"
	"internal/runtime/sys"
	"unsafe"
)

func mallocgcSmallScanNoHeaderSC1( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 1

	const  = 8

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					8 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(8)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 8

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallScanNoHeaderSC2( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 2

	const  = 16

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					16 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(16)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 16

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallScanNoHeaderSC3( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 3

	const  = 24

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					24 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(24)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 24

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallScanNoHeaderSC4( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 4

	const  = 32

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					32 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(32)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 32

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallScanNoHeaderSC5( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 5

	const  = 48

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					48 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(48)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 48

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallScanNoHeaderSC6( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 6

	const  = 64

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					64 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(64)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 64

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallScanNoHeaderSC7( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 7

	const  = 80

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					80 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(80)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 80

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallScanNoHeaderSC8( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 8

	const  = 96

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					96 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(96)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 96

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallScanNoHeaderSC9( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 9

	const  = 112

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					112 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(112)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 112

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallScanNoHeaderSC10( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 10

	const  = 128

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					128 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(128)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 128

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallScanNoHeaderSC11( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 11

	const  = 144

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					144 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(144)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 144

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallScanNoHeaderSC12( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 12

	const  = 160

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					160 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(160)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 160

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallScanNoHeaderSC13( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 13

	const  = 176

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					176 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(176)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 176

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallScanNoHeaderSC14( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 14

	const  = 192

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					192 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(192)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 192

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallScanNoHeaderSC15( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 15

	const  = 208

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					208 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(208)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 208

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallScanNoHeaderSC16( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 16

	const  = 224

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					224 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(224)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 224

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallScanNoHeaderSC17( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 17

	const  = 240

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					240 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(240)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 240

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallScanNoHeaderSC18( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 18

	const  = 256

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					256 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(256)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 256

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallScanNoHeaderSC19( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 19

	const  = 288

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					288 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(288)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 288

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallScanNoHeaderSC20( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 20

	const  = 320

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					320 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(320)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 320

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallScanNoHeaderSC21( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 21

	const  = 352

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					352 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(352)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 352

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallScanNoHeaderSC22( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 22

	const  = 384

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					384 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(384)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 384

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

// mallocgcSmallScanNoHeaderSC23 is the mkmalloc.go-generated allocation
// fast path specialized for span size class 23 (416-byte elements):
// small pointer-bearing objects whose heap bits live in the span
// (no malloc header).
// NOTE(review): local identifier names appear to have been stripped from
// this chunk (blank names around ":=", ".", and call arguments); the code
// is preserved byte-for-byte and only comments were added.
func mallocgcSmallScanNoHeaderSC23( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 23

	const  = 416

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	// Inlined fast path: take the next free slot from the span's cached
	// allocation bitmap, if one is available.
	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					416 +
					.base())
			}
		}
	}
	 := 
	// Fast path missed: fall back to the slower nextFree refill path.
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		// Write the type's pointer/scalar bitmap into the span's heap
		// bits so the GC can scan this object.
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(416)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 416

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		// The bitmap write may straddle two uintptr words of heap bits.
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	// Ensure zeroing/initialization is visible before the pointer escapes.
	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

// mallocgcSmallScanNoHeaderSC24 is the mkmalloc.go-generated allocation
// fast path specialized for span size class 24 (448-byte elements):
// small pointer-bearing objects whose heap bits live in the span
// (no malloc header).
// NOTE(review): local identifier names appear to have been stripped from
// this chunk; the code is preserved byte-for-byte, only comments added.
func mallocgcSmallScanNoHeaderSC24( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 24

	const  = 448

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	// Inlined fast path: take the next free slot from the span's cached
	// allocation bitmap, if one is available.
	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					448 +
					.base())
			}
		}
	}
	 := 
	// Fast path missed: fall back to the slower nextFree refill path.
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		// Write the type's pointer/scalar bitmap into the span's heap
		// bits so the GC can scan this object.
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(448)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 448

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		// The bitmap write may straddle two uintptr words of heap bits.
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	// Ensure zeroing/initialization is visible before the pointer escapes.
	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

// mallocgcSmallScanNoHeaderSC25 is the mkmalloc.go-generated allocation
// fast path specialized for span size class 25 (480-byte elements):
// small pointer-bearing objects whose heap bits live in the span
// (no malloc header).
// NOTE(review): local identifier names appear to have been stripped from
// this chunk; the code is preserved byte-for-byte, only comments added.
func mallocgcSmallScanNoHeaderSC25( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 25

	const  = 480

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	// Inlined fast path: take the next free slot from the span's cached
	// allocation bitmap, if one is available.
	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					480 +
					.base())
			}
		}
	}
	 := 
	// Fast path missed: fall back to the slower nextFree refill path.
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		// Write the type's pointer/scalar bitmap into the span's heap
		// bits so the GC can scan this object.
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(480)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 480

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		// The bitmap write may straddle two uintptr words of heap bits.
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	// Ensure zeroing/initialization is visible before the pointer escapes.
	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

// mallocgcSmallScanNoHeaderSC26 is the mkmalloc.go-generated allocation
// fast path specialized for span size class 26 (512-byte elements):
// small pointer-bearing objects whose heap bits live in the span
// (no malloc header).
// NOTE(review): local identifier names appear to have been stripped from
// this chunk; the code is preserved byte-for-byte, only comments added.
func mallocgcSmallScanNoHeaderSC26( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 26

	const  = 512

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallScanNoHeader(, , )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(0)
	 := .alloc[]

	// Inlined fast path: take the next free slot from the span's cached
	// allocation bitmap, if one is available.
	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					512 +
					.base())
			}
		}
	}
	 := 
	// Fast path missed: fall back to the slower nextFree refill path.
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if .needzero != 0 {
		memclrNoHeapPointers(, )
	}
	if goarch.PtrSize == 8 &&  == 1 {

		.scanAlloc += 8
	} else {
		// Write the type's pointer/scalar bitmap into the span's heap
		// bits so the GC can scan this object.
		 := 
		 := uintptr()

		if doubleCheckHeapSetType && (!heapBitsInSpan() || !heapBitsInSpan(512)) {
			throw("tried to write heap bits, but no heap bits in span")
		}

		 := readUintptr(getGCMask())

		const  = 512

		 := .PtrBytes
		 := 
		if .Size_ == goarch.PtrSize {
			 = (1 << ( / goarch.PtrSize)) - 1
		} else {

			if doubleCheckHeapSetType && !asanenabled && %.Size_ != 0 {
				throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
			}
			for  := .Size_;  < ;  += .Size_ {
				 |=  << ( / goarch.PtrSize)
				 += .Size_
			}
		}

		,  := spanHeapBitsRange(.base(), pageSize, )
		 := unsafe.Pointer()
		 := ( - .base()) / goarch.PtrSize
		 :=  / ptrBits
		 :=  % ptrBits
		const  uintptr =  / goarch.PtrSize

		const  = &(-1) == 0
		// The bitmap write may straddle two uintptr words of heap bits.
		if  > ptrBits || (! && + > ptrBits) {

			 := ptrBits - 
			 :=  - 
			 := (*uintptr)(add(, (+0)*goarch.PtrSize))
			 := (*uintptr)(add(, (+1)*goarch.PtrSize))
			* = (*)&(^uintptr(0)>>) | ( << )
			* = (*)&^((1<<)-1) | ( >> )
		} else {

			 := (*uintptr)(add(, *goarch.PtrSize))
			* = (*)&^(((1<<(min(, ptrBits)))-1)<<) | ( << )
		}

		const  = false
		if  {
			writeHeapBitsDoubleCheck(, , , , , , , , )
		}
		if doubleCheckHeapSetType {
			doubleCheckHeapType(, , , nil, )
		}
		.scanAlloc += 
	}

	// Ensure zeroing/initialization is visible before the pointer escapes.
	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

// mallocTiny1 is the mkmalloc.go-generated tiny-allocator fast path for a
// 1-byte no-scan allocation: it first tries to bump-allocate out of the
// current 16-byte tiny block, then falls back to carving a fresh element
// from the tiny span class. Under goexperiment.RuntimeSecret it defers to
// the non-tiny small-alloc path instead.
// NOTE(review): local identifier names appear to have been stripped from
// this chunk; the code is preserved byte-for-byte, only comments added.
func mallocTiny1( uintptr,  *_type,  bool) unsafe.Pointer {

	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {
		return mallocgcSmallNoScanSC2(, , )
	}

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 1

	const  = 16

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckTiny(, , )
	}
	.mallocing = 1

	 := getMCache()
	 := .tinyoffset

	// Round the tiny offset up to the allocation's natural alignment.
	if &7 == 0 {
		 = alignUp(, 8)
	} else if goarch.PtrSize == 4 &&  == 12 {

		 = alignUp(, 8)
	} else if &3 == 0 {
		 = alignUp(, 4)
	} else if &1 == 0 {
		 = alignUp(, 2)
	}
	// Tiny-block hit: bump-allocate from the existing block, no span work.
	if + <= maxTinySize && .tiny != 0 {

		 := unsafe.Pointer(.tiny + )
		.tinyoffset =  + 
		.tinyAllocs++
		.mallocing = 0
		releasem()
		const  = 0
		{

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	 := false
	 := .alloc[tinySpanClass]

	const  = 8192
	const  = uint16(( - unsafe.Sizeof(spanInlineMarkBits{})) /
		16,
	)
	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  <  {
			 :=  + 1
			if !(%64 == 0 &&  != ) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					16 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree(tinySpanClass)
	}
	 := unsafe.Pointer()
	// Fresh tiny block: zero all 16 bytes up front.
	(*[2]uint64)()[0] = 0
	(*[2]uint64)()[1] = 0

	if !raceenabled && ( < .tinyoffset || .tiny == 0) {

		.tiny = uintptr()
		.tinyoffset = 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}

	if raceenabled {

		 = add(, -)
	}
	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

// mallocTiny2 is the mkmalloc.go-generated tiny-allocator fast path for a
// 2-byte no-scan allocation: bump-allocate from the current 16-byte tiny
// block, falling back to a fresh element from the tiny span class.
// NOTE(review): local identifier names appear to have been stripped from
// this chunk; the code is preserved byte-for-byte, only comments added.
func mallocTiny2( uintptr,  *_type,  bool) unsafe.Pointer {

	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {
		return mallocgcSmallNoScanSC2(, , )
	}

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 2

	const  = 16

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckTiny(, , )
	}
	.mallocing = 1

	 := getMCache()
	 := .tinyoffset

	// Round the tiny offset up to the allocation's natural alignment.
	if &7 == 0 {
		 = alignUp(, 8)
	} else if goarch.PtrSize == 4 &&  == 12 {

		 = alignUp(, 8)
	} else if &3 == 0 {
		 = alignUp(, 4)
	} else if &1 == 0 {
		 = alignUp(, 2)
	}
	// Tiny-block hit: bump-allocate from the existing block, no span work.
	if + <= maxTinySize && .tiny != 0 {

		 := unsafe.Pointer(.tiny + )
		.tinyoffset =  + 
		.tinyAllocs++
		.mallocing = 0
		releasem()
		const  = 0
		{

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	 := false
	 := .alloc[tinySpanClass]

	const  = 8192
	const  = uint16(( - unsafe.Sizeof(spanInlineMarkBits{})) /
		16,
	)
	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  <  {
			 :=  + 1
			if !(%64 == 0 &&  != ) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					16 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree(tinySpanClass)
	}
	 := unsafe.Pointer()
	// Fresh tiny block: zero all 16 bytes up front.
	(*[2]uint64)()[0] = 0
	(*[2]uint64)()[1] = 0

	if !raceenabled && ( < .tinyoffset || .tiny == 0) {

		.tiny = uintptr()
		.tinyoffset = 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}

	if raceenabled {

		 = add(, -)
	}
	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

// mallocTiny3 is the mkmalloc.go-generated tiny-allocator fast path for a
// 3-byte no-scan allocation: bump-allocate from the current 16-byte tiny
// block, falling back to a fresh element from the tiny span class.
// NOTE(review): local identifier names appear to have been stripped from
// this chunk; the code is preserved byte-for-byte, only comments added.
func mallocTiny3( uintptr,  *_type,  bool) unsafe.Pointer {

	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {
		return mallocgcSmallNoScanSC2(, , )
	}

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 3

	const  = 16

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckTiny(, , )
	}
	.mallocing = 1

	 := getMCache()
	 := .tinyoffset

	// Round the tiny offset up to the allocation's natural alignment.
	if &7 == 0 {
		 = alignUp(, 8)
	} else if goarch.PtrSize == 4 &&  == 12 {

		 = alignUp(, 8)
	} else if &3 == 0 {
		 = alignUp(, 4)
	} else if &1 == 0 {
		 = alignUp(, 2)
	}
	// Tiny-block hit: bump-allocate from the existing block, no span work.
	if + <= maxTinySize && .tiny != 0 {

		 := unsafe.Pointer(.tiny + )
		.tinyoffset =  + 
		.tinyAllocs++
		.mallocing = 0
		releasem()
		const  = 0
		{

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	 := false
	 := .alloc[tinySpanClass]

	const  = 8192
	const  = uint16(( - unsafe.Sizeof(spanInlineMarkBits{})) /
		16,
	)
	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  <  {
			 :=  + 1
			if !(%64 == 0 &&  != ) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					16 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree(tinySpanClass)
	}
	 := unsafe.Pointer()
	// Fresh tiny block: zero all 16 bytes up front.
	(*[2]uint64)()[0] = 0
	(*[2]uint64)()[1] = 0

	if !raceenabled && ( < .tinyoffset || .tiny == 0) {

		.tiny = uintptr()
		.tinyoffset = 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}

	if raceenabled {

		 = add(, -)
	}
	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

// mallocTiny4 is the mkmalloc.go-generated tiny-allocator fast path for a
// 4-byte no-scan allocation: bump-allocate from the current 16-byte tiny
// block, falling back to a fresh element from the tiny span class.
// NOTE(review): local identifier names appear to have been stripped from
// this chunk; the code is preserved byte-for-byte, only comments added.
func mallocTiny4( uintptr,  *_type,  bool) unsafe.Pointer {

	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {
		return mallocgcSmallNoScanSC2(, , )
	}

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 4

	const  = 16

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckTiny(, , )
	}
	.mallocing = 1

	 := getMCache()
	 := .tinyoffset

	// Round the tiny offset up to the allocation's natural alignment.
	if &7 == 0 {
		 = alignUp(, 8)
	} else if goarch.PtrSize == 4 &&  == 12 {

		 = alignUp(, 8)
	} else if &3 == 0 {
		 = alignUp(, 4)
	} else if &1 == 0 {
		 = alignUp(, 2)
	}
	// Tiny-block hit: bump-allocate from the existing block, no span work.
	if + <= maxTinySize && .tiny != 0 {

		 := unsafe.Pointer(.tiny + )
		.tinyoffset =  + 
		.tinyAllocs++
		.mallocing = 0
		releasem()
		const  = 0
		{

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	 := false
	 := .alloc[tinySpanClass]

	const  = 8192
	const  = uint16(( - unsafe.Sizeof(spanInlineMarkBits{})) /
		16,
	)
	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  <  {
			 :=  + 1
			if !(%64 == 0 &&  != ) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					16 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree(tinySpanClass)
	}
	 := unsafe.Pointer()
	// Fresh tiny block: zero all 16 bytes up front.
	(*[2]uint64)()[0] = 0
	(*[2]uint64)()[1] = 0

	if !raceenabled && ( < .tinyoffset || .tiny == 0) {

		.tiny = uintptr()
		.tinyoffset = 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}

	if raceenabled {

		 = add(, -)
	}
	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

// mallocTiny5 is the mkmalloc.go-generated tiny-allocator fast path for a
// 5-byte no-scan allocation: bump-allocate from the current 16-byte tiny
// block, falling back to a fresh element from the tiny span class.
// NOTE(review): local identifier names appear to have been stripped from
// this chunk; the code is preserved byte-for-byte, only comments added.
func mallocTiny5( uintptr,  *_type,  bool) unsafe.Pointer {

	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {
		return mallocgcSmallNoScanSC2(, , )
	}

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 5

	const  = 16

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckTiny(, , )
	}
	.mallocing = 1

	 := getMCache()
	 := .tinyoffset

	// Round the tiny offset up to the allocation's natural alignment.
	if &7 == 0 {
		 = alignUp(, 8)
	} else if goarch.PtrSize == 4 &&  == 12 {

		 = alignUp(, 8)
	} else if &3 == 0 {
		 = alignUp(, 4)
	} else if &1 == 0 {
		 = alignUp(, 2)
	}
	// Tiny-block hit: bump-allocate from the existing block, no span work.
	if + <= maxTinySize && .tiny != 0 {

		 := unsafe.Pointer(.tiny + )
		.tinyoffset =  + 
		.tinyAllocs++
		.mallocing = 0
		releasem()
		const  = 0
		{

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	 := false
	 := .alloc[tinySpanClass]

	const  = 8192
	const  = uint16(( - unsafe.Sizeof(spanInlineMarkBits{})) /
		16,
	)
	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  <  {
			 :=  + 1
			if !(%64 == 0 &&  != ) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					16 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree(tinySpanClass)
	}
	 := unsafe.Pointer()
	// Fresh tiny block: zero all 16 bytes up front.
	(*[2]uint64)()[0] = 0
	(*[2]uint64)()[1] = 0

	if !raceenabled && ( < .tinyoffset || .tiny == 0) {

		.tiny = uintptr()
		.tinyoffset = 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}

	if raceenabled {

		 = add(, -)
	}
	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

// mallocTiny6 is the mkmalloc.go-generated tiny-allocator fast path for a
// 6-byte no-scan allocation: bump-allocate from the current 16-byte tiny
// block, falling back to a fresh element from the tiny span class.
// NOTE(review): local identifier names appear to have been stripped from
// this chunk; the code is preserved byte-for-byte, only comments added.
func mallocTiny6( uintptr,  *_type,  bool) unsafe.Pointer {

	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {
		return mallocgcSmallNoScanSC2(, , )
	}

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 6

	const  = 16

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckTiny(, , )
	}
	.mallocing = 1

	 := getMCache()
	 := .tinyoffset

	// Round the tiny offset up to the allocation's natural alignment.
	if &7 == 0 {
		 = alignUp(, 8)
	} else if goarch.PtrSize == 4 &&  == 12 {

		 = alignUp(, 8)
	} else if &3 == 0 {
		 = alignUp(, 4)
	} else if &1 == 0 {
		 = alignUp(, 2)
	}
	// Tiny-block hit: bump-allocate from the existing block, no span work.
	if + <= maxTinySize && .tiny != 0 {

		 := unsafe.Pointer(.tiny + )
		.tinyoffset =  + 
		.tinyAllocs++
		.mallocing = 0
		releasem()
		const  = 0
		{

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	 := false
	 := .alloc[tinySpanClass]

	const  = 8192
	const  = uint16(( - unsafe.Sizeof(spanInlineMarkBits{})) /
		16,
	)
	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  <  {
			 :=  + 1
			if !(%64 == 0 &&  != ) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					16 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree(tinySpanClass)
	}
	 := unsafe.Pointer()
	// Fresh tiny block: zero all 16 bytes up front.
	(*[2]uint64)()[0] = 0
	(*[2]uint64)()[1] = 0

	if !raceenabled && ( < .tinyoffset || .tiny == 0) {

		.tiny = uintptr()
		.tinyoffset = 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}

	if raceenabled {

		 = add(, -)
	}
	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

// mallocTiny7 is the mkmalloc.go-generated tiny-allocator fast path for a
// 7-byte no-scan allocation: bump-allocate from the current 16-byte tiny
// block, falling back to a fresh element from the tiny span class.
// NOTE(review): local identifier names appear to have been stripped from
// this chunk; the code is preserved byte-for-byte, only comments added.
func mallocTiny7( uintptr,  *_type,  bool) unsafe.Pointer {

	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {
		return mallocgcSmallNoScanSC2(, , )
	}

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 7

	const  = 16

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckTiny(, , )
	}
	.mallocing = 1

	 := getMCache()
	 := .tinyoffset

	// Round the tiny offset up to the allocation's natural alignment.
	if &7 == 0 {
		 = alignUp(, 8)
	} else if goarch.PtrSize == 4 &&  == 12 {

		 = alignUp(, 8)
	} else if &3 == 0 {
		 = alignUp(, 4)
	} else if &1 == 0 {
		 = alignUp(, 2)
	}
	// Tiny-block hit: bump-allocate from the existing block, no span work.
	if + <= maxTinySize && .tiny != 0 {

		 := unsafe.Pointer(.tiny + )
		.tinyoffset =  + 
		.tinyAllocs++
		.mallocing = 0
		releasem()
		const  = 0
		{

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	 := false
	 := .alloc[tinySpanClass]

	const  = 8192
	const  = uint16(( - unsafe.Sizeof(spanInlineMarkBits{})) /
		16,
	)
	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  <  {
			 :=  + 1
			if !(%64 == 0 &&  != ) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					16 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree(tinySpanClass)
	}
	 := unsafe.Pointer()
	// Fresh tiny block: zero all 16 bytes up front.
	(*[2]uint64)()[0] = 0
	(*[2]uint64)()[1] = 0

	if !raceenabled && ( < .tinyoffset || .tiny == 0) {

		.tiny = uintptr()
		.tinyoffset = 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}

	if raceenabled {

		 = add(, -)
	}
	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

// mallocTiny8 is the mkmalloc.go-generated tiny-allocator fast path for an
// 8-byte no-scan allocation: bump-allocate from the current 16-byte tiny
// block, falling back to a fresh element from the tiny span class.
// NOTE(review): local identifier names appear to have been stripped from
// this chunk; the code is preserved byte-for-byte, only comments added.
func mallocTiny8( uintptr,  *_type,  bool) unsafe.Pointer {

	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {
		return mallocgcSmallNoScanSC2(, , )
	}

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 8

	const  = 16

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckTiny(, , )
	}
	.mallocing = 1

	 := getMCache()
	 := .tinyoffset

	// Round the tiny offset up to the allocation's natural alignment.
	if &7 == 0 {
		 = alignUp(, 8)
	} else if goarch.PtrSize == 4 &&  == 12 {

		 = alignUp(, 8)
	} else if &3 == 0 {
		 = alignUp(, 4)
	} else if &1 == 0 {
		 = alignUp(, 2)
	}
	// Tiny-block hit: bump-allocate from the existing block, no span work.
	if + <= maxTinySize && .tiny != 0 {

		 := unsafe.Pointer(.tiny + )
		.tinyoffset =  + 
		.tinyAllocs++
		.mallocing = 0
		releasem()
		const  = 0
		{

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	 := false
	 := .alloc[tinySpanClass]

	const  = 8192
	const  = uint16(( - unsafe.Sizeof(spanInlineMarkBits{})) /
		16,
	)
	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  <  {
			 :=  + 1
			if !(%64 == 0 &&  != ) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					16 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree(tinySpanClass)
	}
	 := unsafe.Pointer()
	// Fresh tiny block: zero all 16 bytes up front.
	(*[2]uint64)()[0] = 0
	(*[2]uint64)()[1] = 0

	if !raceenabled && ( < .tinyoffset || .tiny == 0) {

		.tiny = uintptr()
		.tinyoffset = 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}

	if raceenabled {

		 = add(, -)
	}
	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocTiny9( uintptr,  *_type,  bool) unsafe.Pointer {

	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {
		return mallocgcSmallNoScanSC2(, , )
	}

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 9

	const  = 16

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckTiny(, , )
	}
	.mallocing = 1

	 := getMCache()
	 := .tinyoffset

	if &7 == 0 {
		 = alignUp(, 8)
	} else if goarch.PtrSize == 4 &&  == 12 {

		 = alignUp(, 8)
	} else if &3 == 0 {
		 = alignUp(, 4)
	} else if &1 == 0 {
		 = alignUp(, 2)
	}
	if + <= maxTinySize && .tiny != 0 {

		 := unsafe.Pointer(.tiny + )
		.tinyoffset =  + 
		.tinyAllocs++
		.mallocing = 0
		releasem()
		const  = 0
		{

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	 := false
	 := .alloc[tinySpanClass]

	const  = 8192
	const  = uint16(( - unsafe.Sizeof(spanInlineMarkBits{})) /
		16,
	)
	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  <  {
			 :=  + 1
			if !(%64 == 0 &&  != ) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					16 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree(tinySpanClass)
	}
	 := unsafe.Pointer()
	(*[2]uint64)()[0] = 0
	(*[2]uint64)()[1] = 0

	if !raceenabled && ( < .tinyoffset || .tiny == 0) {

		.tiny = uintptr()
		.tinyoffset = 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}

	if raceenabled {

		 = add(, -)
	}
	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocTiny10( uintptr,  *_type,  bool) unsafe.Pointer {

	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {
		return mallocgcSmallNoScanSC2(, , )
	}

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 10

	const  = 16

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckTiny(, , )
	}
	.mallocing = 1

	 := getMCache()
	 := .tinyoffset

	if &7 == 0 {
		 = alignUp(, 8)
	} else if goarch.PtrSize == 4 &&  == 12 {

		 = alignUp(, 8)
	} else if &3 == 0 {
		 = alignUp(, 4)
	} else if &1 == 0 {
		 = alignUp(, 2)
	}
	if + <= maxTinySize && .tiny != 0 {

		 := unsafe.Pointer(.tiny + )
		.tinyoffset =  + 
		.tinyAllocs++
		.mallocing = 0
		releasem()
		const  = 0
		{

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	 := false
	 := .alloc[tinySpanClass]

	const  = 8192
	const  = uint16(( - unsafe.Sizeof(spanInlineMarkBits{})) /
		16,
	)
	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  <  {
			 :=  + 1
			if !(%64 == 0 &&  != ) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					16 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree(tinySpanClass)
	}
	 := unsafe.Pointer()
	(*[2]uint64)()[0] = 0
	(*[2]uint64)()[1] = 0

	if !raceenabled && ( < .tinyoffset || .tiny == 0) {

		.tiny = uintptr()
		.tinyoffset = 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}

	if raceenabled {

		 = add(, -)
	}
	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocTiny11( uintptr,  *_type,  bool) unsafe.Pointer {

	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {
		return mallocgcSmallNoScanSC2(, , )
	}

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 11

	const  = 16

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckTiny(, , )
	}
	.mallocing = 1

	 := getMCache()
	 := .tinyoffset

	if &7 == 0 {
		 = alignUp(, 8)
	} else if goarch.PtrSize == 4 &&  == 12 {

		 = alignUp(, 8)
	} else if &3 == 0 {
		 = alignUp(, 4)
	} else if &1 == 0 {
		 = alignUp(, 2)
	}
	if + <= maxTinySize && .tiny != 0 {

		 := unsafe.Pointer(.tiny + )
		.tinyoffset =  + 
		.tinyAllocs++
		.mallocing = 0
		releasem()
		const  = 0
		{

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	 := false
	 := .alloc[tinySpanClass]

	const  = 8192
	const  = uint16(( - unsafe.Sizeof(spanInlineMarkBits{})) /
		16,
	)
	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  <  {
			 :=  + 1
			if !(%64 == 0 &&  != ) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					16 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree(tinySpanClass)
	}
	 := unsafe.Pointer()
	(*[2]uint64)()[0] = 0
	(*[2]uint64)()[1] = 0

	if !raceenabled && ( < .tinyoffset || .tiny == 0) {

		.tiny = uintptr()
		.tinyoffset = 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}

	if raceenabled {

		 = add(, -)
	}
	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocTiny12( uintptr,  *_type,  bool) unsafe.Pointer {

	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {
		return mallocgcSmallNoScanSC2(, , )
	}

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 12

	const  = 16

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckTiny(, , )
	}
	.mallocing = 1

	 := getMCache()
	 := .tinyoffset

	if &7 == 0 {
		 = alignUp(, 8)
	} else if goarch.PtrSize == 4 &&  == 12 {

		 = alignUp(, 8)
	} else if &3 == 0 {
		 = alignUp(, 4)
	} else if &1 == 0 {
		 = alignUp(, 2)
	}
	if + <= maxTinySize && .tiny != 0 {

		 := unsafe.Pointer(.tiny + )
		.tinyoffset =  + 
		.tinyAllocs++
		.mallocing = 0
		releasem()
		const  = 0
		{

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	 := false
	 := .alloc[tinySpanClass]

	const  = 8192
	const  = uint16(( - unsafe.Sizeof(spanInlineMarkBits{})) /
		16,
	)
	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  <  {
			 :=  + 1
			if !(%64 == 0 &&  != ) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					16 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree(tinySpanClass)
	}
	 := unsafe.Pointer()
	(*[2]uint64)()[0] = 0
	(*[2]uint64)()[1] = 0

	if !raceenabled && ( < .tinyoffset || .tiny == 0) {

		.tiny = uintptr()
		.tinyoffset = 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}

	if raceenabled {

		 = add(, -)
	}
	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocTiny13( uintptr,  *_type,  bool) unsafe.Pointer {

	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {
		return mallocgcSmallNoScanSC2(, , )
	}

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 13

	const  = 16

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckTiny(, , )
	}
	.mallocing = 1

	 := getMCache()
	 := .tinyoffset

	if &7 == 0 {
		 = alignUp(, 8)
	} else if goarch.PtrSize == 4 &&  == 12 {

		 = alignUp(, 8)
	} else if &3 == 0 {
		 = alignUp(, 4)
	} else if &1 == 0 {
		 = alignUp(, 2)
	}
	if + <= maxTinySize && .tiny != 0 {

		 := unsafe.Pointer(.tiny + )
		.tinyoffset =  + 
		.tinyAllocs++
		.mallocing = 0
		releasem()
		const  = 0
		{

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	 := false
	 := .alloc[tinySpanClass]

	const  = 8192
	const  = uint16(( - unsafe.Sizeof(spanInlineMarkBits{})) /
		16,
	)
	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  <  {
			 :=  + 1
			if !(%64 == 0 &&  != ) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					16 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree(tinySpanClass)
	}
	 := unsafe.Pointer()
	(*[2]uint64)()[0] = 0
	(*[2]uint64)()[1] = 0

	if !raceenabled && ( < .tinyoffset || .tiny == 0) {

		.tiny = uintptr()
		.tinyoffset = 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}

	if raceenabled {

		 = add(, -)
	}
	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocTiny14( uintptr,  *_type,  bool) unsafe.Pointer {

	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {
		return mallocgcSmallNoScanSC2(, , )
	}

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 14

	const  = 16

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckTiny(, , )
	}
	.mallocing = 1

	 := getMCache()
	 := .tinyoffset

	if &7 == 0 {
		 = alignUp(, 8)
	} else if goarch.PtrSize == 4 &&  == 12 {

		 = alignUp(, 8)
	} else if &3 == 0 {
		 = alignUp(, 4)
	} else if &1 == 0 {
		 = alignUp(, 2)
	}
	if + <= maxTinySize && .tiny != 0 {

		 := unsafe.Pointer(.tiny + )
		.tinyoffset =  + 
		.tinyAllocs++
		.mallocing = 0
		releasem()
		const  = 0
		{

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	 := false
	 := .alloc[tinySpanClass]

	const  = 8192
	const  = uint16(( - unsafe.Sizeof(spanInlineMarkBits{})) /
		16,
	)
	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  <  {
			 :=  + 1
			if !(%64 == 0 &&  != ) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					16 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree(tinySpanClass)
	}
	 := unsafe.Pointer()
	(*[2]uint64)()[0] = 0
	(*[2]uint64)()[1] = 0

	if !raceenabled && ( < .tinyoffset || .tiny == 0) {

		.tiny = uintptr()
		.tinyoffset = 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}

	if raceenabled {

		 = add(, -)
	}
	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocTiny15( uintptr,  *_type,  bool) unsafe.Pointer {

	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {
		return mallocgcSmallNoScanSC2(, , )
	}

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 15

	const  = 16

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckTiny(, , )
	}
	.mallocing = 1

	 := getMCache()
	 := .tinyoffset

	if &7 == 0 {
		 = alignUp(, 8)
	} else if goarch.PtrSize == 4 &&  == 12 {

		 = alignUp(, 8)
	} else if &3 == 0 {
		 = alignUp(, 4)
	} else if &1 == 0 {
		 = alignUp(, 2)
	}
	if + <= maxTinySize && .tiny != 0 {

		 := unsafe.Pointer(.tiny + )
		.tinyoffset =  + 
		.tinyAllocs++
		.mallocing = 0
		releasem()
		const  = 0
		{

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	 := false
	 := .alloc[tinySpanClass]

	const  = 8192
	const  = uint16(( - unsafe.Sizeof(spanInlineMarkBits{})) /
		16,
	)
	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  <  {
			 :=  + 1
			if !(%64 == 0 &&  != ) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					16 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree(tinySpanClass)
	}
	 := unsafe.Pointer()
	(*[2]uint64)()[0] = 0
	(*[2]uint64)()[1] = 0

	if !raceenabled && ( < .tinyoffset || .tiny == 0) {

		.tiny = uintptr()
		.tinyoffset = 
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}

	if raceenabled {

		 = add(, -)
	}
	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallNoScanSC2( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 2

	const  = 16

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					16 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallNoScanSC3( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 3

	const  = 24

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					24 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallNoScanSC4( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 4

	const  = 32

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					32 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallNoScanSC5( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 5

	const  = 48

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					48 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallNoScanSC6( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 6

	const  = 64

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					64 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallNoScanSC7( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 7

	const  = 80

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					80 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallNoScanSC8( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 8

	const  = 96

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					96 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallNoScanSC9( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 9

	const  = 112

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					112 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallNoScanSC10( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 10

	const  = 128

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					128 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallNoScanSC11( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 11

	const  = 144

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					144 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallNoScanSC12( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 12

	const  = 160

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					160 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallNoScanSC13( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 13

	const  = 176

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					176 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallNoScanSC14( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 14

	const  = 192

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					192 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallNoScanSC15( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 15

	const  = 208

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					208 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallNoScanSC16( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 16

	const  = 224

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					224 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallNoScanSC17( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 17

	const  = 240

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					240 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallNoScanSC18( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 18

	const  = 256

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					256 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallNoScanSC19( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 19

	const  = 288

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					288 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallNoScanSC20( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 20

	const  = 320

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					320 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallNoScanSC21( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 21

	const  = 352

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					352 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallNoScanSC22( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 22

	const  = 384

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					384 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

func mallocgcSmallNoScanSC23( uintptr,  *_type,  bool) unsafe.Pointer {

	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	lockRankMayQueueFinalizer()

	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	const  = 23

	const  = 416

	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					416 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	publicationBarrier()

	if writeBarrier.enabled {

		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

// mallocgcSmallNoScanSC24 allocates a small pointer-free ("noscan")
// object from size class 24 (448-byte elements, per the literal element
// size below). Code generated by mkmalloc.go; DO NOT EDIT by hand.
//
// NOTE(review): local identifiers in this listing appear stripped by the
// extraction tool; the blank names are left byte-identical and only
// comments were added. Compare with malloc_stubs.go / the generic
// mallocgc for the canonical variable names.
func mallocgcSmallNoScanSC24( uintptr,  *_type,  bool) unsafe.Pointer {

	// Allocation is forbidden while the GC is in mark termination.
	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	// Record for lock-rank checking that malloc may queue a finalizer.
	lockRankMayQueueFinalizer()

	// Pre-allocation debug hook; a non-nil result satisfies the
	// allocation and short-circuits the normal path.
	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	// While the GC is active, charge this goroutine assist credit.
	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	// Size class 24 ...
	const  = 24

	// ... with 448-byte elements.
	const  = 448

	// Pin the M and flag malloc-in-progress (re-entrancy guard).
	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	// Span class: size class shifted left one, low bit 1 = noscan.
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	// NOTE(review): runtimeFreegcEnabled / hasReusableNoscan /
	// mallocgcSmallNoscanReuse are not upstream Go runtime APIs —
	// presumably a freed-object reuse fast path from a fork or
	// experiment; confirm semantics against their definitions.
	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			// Repay assist debt for the bytes just allocated.
			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	// Inlined nextFreeFast equivalent: scan the span's 64-bit
	// allocation-bitmap cache for the next free slot.
	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					448 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		// Slow path: refill the cache / obtain a new span; the extra
		// result presumably requests the GC-trigger check below.
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	// Make the zeroed memory visible before the pointer is published.
	publicationBarrier()

	if writeBarrier.enabled {

		// GC is marking: allocate the new object black.
		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	// Heap-profiling sampling countdown.
	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	// Kick off a GC cycle if requested above and the heap trigger fires.
	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	// Repay assist debt for the rounded-up allocation size.
	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

// mallocgcSmallNoScanSC25 allocates a small pointer-free ("noscan")
// object from size class 25 (480-byte elements, per the literal element
// size below). Code generated by mkmalloc.go; DO NOT EDIT by hand.
//
// NOTE(review): local identifiers in this listing appear stripped by the
// extraction tool; the blank names are left byte-identical and only
// comments were added. Compare with malloc_stubs.go / the generic
// mallocgc for the canonical variable names.
func mallocgcSmallNoScanSC25( uintptr,  *_type,  bool) unsafe.Pointer {

	// Allocation is forbidden while the GC is in mark termination.
	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	// Record for lock-rank checking that malloc may queue a finalizer.
	lockRankMayQueueFinalizer()

	// Pre-allocation debug hook; a non-nil result satisfies the
	// allocation and short-circuits the normal path.
	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	// While the GC is active, charge this goroutine assist credit.
	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	// Size class 25 ...
	const  = 25

	// ... with 480-byte elements.
	const  = 480

	// Pin the M and flag malloc-in-progress (re-entrancy guard).
	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	// Span class: size class shifted left one, low bit 1 = noscan.
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	// NOTE(review): runtimeFreegcEnabled / hasReusableNoscan /
	// mallocgcSmallNoscanReuse are not upstream Go runtime APIs —
	// presumably a freed-object reuse fast path from a fork or
	// experiment; confirm semantics against their definitions.
	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			// Repay assist debt for the bytes just allocated.
			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	// Inlined nextFreeFast equivalent: scan the span's 64-bit
	// allocation-bitmap cache for the next free slot.
	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					480 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		// Slow path: refill the cache / obtain a new span; the extra
		// result presumably requests the GC-trigger check below.
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	// Make the zeroed memory visible before the pointer is published.
	publicationBarrier()

	if writeBarrier.enabled {

		// GC is marking: allocate the new object black.
		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	// Heap-profiling sampling countdown.
	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	// Kick off a GC cycle if requested above and the heap trigger fires.
	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	// Repay assist debt for the rounded-up allocation size.
	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}

// mallocgcSmallNoScanSC26 allocates a small pointer-free ("noscan")
// object from size class 26 (512-byte elements, per the literal element
// size below). Code generated by mkmalloc.go; DO NOT EDIT by hand.
//
// NOTE(review): local identifiers in this listing appear stripped by the
// extraction tool; the blank names are left byte-identical and only
// comments were added. Compare with malloc_stubs.go / the generic
// mallocgc for the canonical variable names.
func mallocgcSmallNoScanSC26( uintptr,  *_type,  bool) unsafe.Pointer {

	// Allocation is forbidden while the GC is in mark termination.
	if doubleCheckMalloc {
		if gcphase == _GCmarktermination {
			throw("mallocgc called with gcphase == _GCmarktermination")
		}
	}

	// Record for lock-rank checking that malloc may queue a finalizer.
	lockRankMayQueueFinalizer()

	// Pre-allocation debug hook; a non-nil result satisfies the
	// allocation and short-circuits the normal path.
	if debug.malloc {
		if  := preMallocgcDebug(, );  != nil {
			return 
		}
	}

	// While the GC is active, charge this goroutine assist credit.
	if gcBlackenEnabled != 0 {
		deductAssistCredit()
	}

	// Size class 26 ...
	const  = 26

	// ... with 512-byte elements.
	const  = 512

	// Pin the M and flag malloc-in-progress (re-entrancy guard).
	 := acquirem()
	if doubleCheckMalloc {
		doubleCheckSmallNoScan(, )
	}
	.mallocing = 1

	 := false
	 := getMCache()
	// Span class: size class shifted left one, low bit 1 = noscan.
	const  = spanClass(<<1) | spanClass(1)
	 := .alloc[]

	// NOTE(review): runtimeFreegcEnabled / hasReusableNoscan /
	// mallocgcSmallNoscanReuse are not upstream Go runtime APIs —
	// presumably a freed-object reuse fast path from a fork or
	// experiment; confirm semantics against their definitions.
	if runtimeFreegcEnabled && .hasReusableNoscan() {

		 := mallocgcSmallNoscanReuse(, , , , )
		.mallocing = 0
		releasem()
		 := 
		{

			 := getg()
			if goexperiment.RuntimeSecret && .secret > 0 {

				addSecret()
			}

			if valgrindenabled {
				valgrindMalloc(, )
			}

			// Repay assist debt for the bytes just allocated.
			if gcBlackenEnabled != 0 &&  != 0 {
				if  := getg().m.curg;  != nil {
					.gcAssistBytes -= int64( - )
				}
			}

			if debug.malloc {
				postMallocgcDebug(, , )
			}
			return 
		}

	}

	// Inlined nextFreeFast equivalent: scan the span's 64-bit
	// allocation-bitmap cache for the next free slot.
	var  gclinkptr
	if .allocCache != 0 {
		 := sys.TrailingZeros64(.allocCache)
		 := .freeindex + uint16()
		if  < .nelems {
			 :=  + 1
			if !(%64 == 0 &&  != .nelems) {
				.allocCache >>= uint( + 1)
				.freeindex = 
				.allocCount++
				 = gclinkptr(uintptr()*
					512 +
					.base())
			}
		}
	}
	 := 
	if  == 0 {
		// Slow path: refill the cache / obtain a new span; the extra
		// result presumably requests the GC-trigger check below.
		, ,  = .nextFree()
	}
	 := unsafe.Pointer()
	if  && .needzero != 0 {
		memclrNoHeapPointers(, )
	}

	// Make the zeroed memory visible before the pointer is published.
	publicationBarrier()

	if writeBarrier.enabled {

		// GC is marking: allocate the new object black.
		gcmarknewobject(, uintptr())
	} else {

		.freeIndexForScan = .freeindex
	}

	// Heap-profiling sampling countdown.
	.nextSample -= int64()
	if .nextSample < 0 || MemProfileRate != .memProfRate {
		profilealloc(, , )
	}
	.mallocing = 0
	releasem()

	// Kick off a GC cycle if requested above and the heap trigger fires.
	if  {
		if  := (gcTrigger{kind: gcTriggerHeap}); .test() {
			gcStart()
		}
	}
	 := getg()
	if goexperiment.RuntimeSecret && .secret > 0 {

		addSecret()
	}

	if valgrindenabled {
		valgrindMalloc(, )
	}

	// Repay assist debt for the rounded-up allocation size.
	if gcBlackenEnabled != 0 &&  != 0 {
		if  := getg().m.curg;  != nil {
			.gcAssistBytes -= int64( - )
		}
	}

	if debug.malloc {
		postMallocgcDebug(, , )
	}
	return 
}