// Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// GC checkmarks
//
// In a concurrent garbage collector, one worries about failing to mark
// a live object due to mutations without write barriers or bugs in the
// collector implementation. As a sanity check, the GC has a 'checkmark'
// mode that retraverses the object graph with the world stopped, to make
// sure that everything that should be marked is marked.
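//
// Checkmark mode is enabled by setting GODEBUG=gccheckmark=1, for example:
//
//	GODEBUG=gccheckmark=1 ./myprogram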

package runtime

import (
	"internal/goarch"
	"internal/runtime/atomic"
	"internal/runtime/sys"
	"unsafe"
)

// A checkmarksMap stores the GC marks in "checkmarks" mode. It is a
// per-arena bitmap with a bit for every word in the arena. The mark
// is stored on the bit corresponding to the first word of the marked
// allocation.
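//
// For illustration, the checkmark bit for a heap address p is located as
// follows (a sketch, where arena is the heapArena containing p; getCheckmark
// below is the authoritative computation):
//
//	word := (p - alignDown(p, heapArenaBytes)) / goarch.PtrSize
//	bytep := &arena.checkmarks.b[word/8]
//	mask := uint8(1 << (word % 8))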
type checkmarksMap struct {
	_ sys.NotInHeap
	b [heapArenaBytes / goarch.PtrSize / 8]uint8
}

// If useCheckmark is true, marking of an object uses the checkmark
// bits instead of the standard mark bits.
var useCheckmark = false

// startCheckmarks prepares for the checkmarks phase.
//
// The world must be stopped.
func startCheckmarks() {
	assertWorldStopped()

	// Clear all checkmarks.
	clearCheckmarks := func(ai arenaIdx) {
		arena := mheap_.arenas[ai.l1()][ai.l2()]
		bitmap := arena.checkmarks

		if bitmap == nil {
			// Allocate bitmap on first use.
			bitmap = (*checkmarksMap)(persistentalloc(unsafe.Sizeof(*bitmap), 0, &memstats.gcMiscSys))
			if bitmap == nil {
				throw("out of memory allocating checkmarks bitmap")
			}
			arena.checkmarks = bitmap
		} else {
			// Otherwise clear the existing bitmap.
			clear(bitmap.b[:])
		}
	}
	for _, ai := range mheap_.heapArenas {
		clearCheckmarks(ai)
	}
	for _, ai := range mheap_.userArenaArenas {
		clearCheckmarks(ai)
	}

	// Enable checkmarking.
	useCheckmark = true
}

// endCheckmarks ends the checkmarks phase.
func endCheckmarks() {
	if gcMarkWorkAvailable(nil) {
		throw("GC work not flushed")
	}
	useCheckmark = false
}

// setCheckmark throws if marking object is a checkmarks violation,
// and otherwise sets obj's checkmark. It returns true if obj was
// already checkmarked.
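//
// A sketch of the expected call pattern in the marker (simplified from
// greyobject; for illustration only):
//
//	if useCheckmark {
//		if setCheckmark(obj, base, off, mbits) {
//			// Already visited during this checkmark pass; don't
//			// queue obj for scanning again.
//			return
//		}
//	}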
func setCheckmark(obj, base, off uintptr, mbits markBits) bool {
	if !mbits.isMarked() {
		printlock()
		print("runtime: checkmarks found unexpected unmarked object obj=", hex(obj), "\n")
		print("runtime: found obj at *(", hex(base), "+", hex(off), ")\n")

		// Dump the source (base) object.
		gcDumpObject("base", base, off)

		// Dump the object.
		gcDumpObject("obj", obj, ^uintptr(0))

		getg().m.traceback = 2
		throw("checkmark found unmarked object")
	}
	bytep, mask := getCheckmark(obj)
	if bytep == nil {
		return false
	}
	if atomic.Load8(bytep)&mask != 0 {
		// Already checkmarked.
		return true
	}
	atomic.Or8(bytep, mask)
	return false
}

func getCheckmark(obj uintptr) (bytep *byte, mask uint8) {
	ai := arenaIndex(obj)
	arena := mheap_.arenas[ai.l1()][ai.l2()]
	if arena == nil {
		// Non-heap pointer.
		return nil, 0
	}
	wordIdx := (obj - alignDown(obj, heapArenaBytes)) / goarch.PtrSize
	arenaWord := wordIdx / 8
	mask = byte(1 << (wordIdx % 8))
	bytep = &arena.checkmarks.b[arenaWord]
	return bytep, mask
}

// runCheckmark runs a full non-parallel, stop-the-world mark using
// checkmark bits, to check that we didn't forget to mark anything
// during the concurrent mark process.
//
// The world must be stopped to call runCheckmark.
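//
// For example, the whole-heap verification pass (GODEBUG=gccheckmark=1)
// supplies a function that re-queues the usual GC roots, roughly:
//
//	runCheckmark(func(_ *gcWork) {
//		gcMarkRootPrepare()
//	})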
func runCheckmark(prepareRootSet func(*gcWork)) {
	assertWorldStopped()

	// Turn off gcwaiting because that will force
	// gcDrain to return early if this goroutine
	// happens to have its preemption flag set.
	// This is fine because the world is stopped.
	// Restore it after we're done just to be safe.
	sched.gcwaiting.Store(false)
	startCheckmarks()
	gcResetMarkState()
	gcw := &getg().m.p.ptr().gcw
	prepareRootSet(gcw)
	gcDrain(gcw, 0)
	wbBufFlush1(getg().m.p.ptr())
	gcw.dispose()
	endCheckmarks()
	sched.gcwaiting.Store(true)
}

// checkFinalizersAndCleanups uses checkmarks to check for potential issues
// with the program's use of cleanups and finalizers.
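//
// This check runs when GODEBUG=checkfinalizers=1 is set; values above 1
// additionally dump a scan trace for each cleanup/finalizer root, e.g.
//
//	GODEBUG=checkfinalizers=1 ./myprogram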
func checkFinalizersAndCleanups() {
	assertWorldStopped()

	const (
		reportCycle = 1 << iota
		reportTiny
	)

	// Collect reports of problematic specials, up to a fixed cap, so we
	// can print them all together at the end.
	type report struct {
		issues int
		ptr    uintptr
		sp     *special
	}
	var reports [50]report
	var nreports int
	var more bool
	var lastTinyBlock uintptr

	forEachSpecial(func(p uintptr, s *mspan, sp *special) bool {
		// N.B. The tiny block specials are sorted first in the specials list.
		if sp.kind == _KindSpecialTinyBlock {
			lastTinyBlock = s.base() + sp.offset
			return true
		}

		// We only care about finalizers and cleanups.
		if sp.kind != _KindSpecialFinalizer && sp.kind != _KindSpecialCleanup {
			return true
		}

		// Run a checkmark GC using this cleanup and/or finalizer as a root.
		if debug.checkfinalizers > 1 {
			print("Scan trace for cleanup/finalizer on ", hex(p), ":\n")
		}
		runCheckmark(func(gcw *gcWork) {
			switch sp.kind {
			case _KindSpecialFinalizer:
				gcScanFinalizer((*specialfinalizer)(unsafe.Pointer(sp)), s, gcw)
			case _KindSpecialCleanup:
				gcScanCleanup((*specialCleanup)(unsafe.Pointer(sp)), gcw)
			}
		})
		if debug.checkfinalizers > 1 {
			println()
		}

		// Now check to see if the object the special is attached to was marked.
		// The roots above do not directly mark p, so if it is marked, then p
		// must be reachable from the finalizer and/or cleanup, preventing
		// reclamation.
		bytep, mask := getCheckmark(p)
		if bytep == nil {
			return true
		}
		var issues int
		if atomic.Load8(bytep)&mask != 0 {
			issues |= reportCycle
		}
		if p >= lastTinyBlock && p < lastTinyBlock+maxTinySize {
			issues |= reportTiny
		}
		if issues != 0 {
			if nreports >= len(reports) {
				more = true
				return false
			}
			reports[nreports] = report{issues, p, sp}
			nreports++
		}
		return true
	})

	if nreports > 0 {
		lastPtr := uintptr(0)
		println("WARNING: LIKELY CLEANUP/FINALIZER ISSUES")
		println()
		for _, r := range reports[:nreports] {
			var ctx *specialCheckFinalizer
			var kind string
			if r.sp.kind == _KindSpecialFinalizer {
				kind = "finalizer"
				ctx = getCleanupContext(r.ptr, 0)
			} else {
				kind = "cleanup"
				ctx = getCleanupContext(r.ptr, ((*specialCleanup)(unsafe.Pointer(r.sp))).id)
			}

			// N.B. reports is sorted 'enough' that cleanups/finalizers on the same pointer will
			// appear consecutively because the specials list is sorted.
			if lastPtr != r.ptr {
				if lastPtr != 0 {
					println()
				}
				print("Value of type ", toRType(ctx.ptrType).string(), " at ", hex(r.ptr), "\n")
				if r.issues&reportCycle != 0 {
					if r.sp.kind == _KindSpecialFinalizer {
						println("  is reachable from finalizer")
					} else {
						println("  is reachable from cleanup or cleanup argument")
					}
				}
				if r.issues&reportTiny != 0 {
					println("  is in a tiny block with other (possibly long-lived) values")
				}
				if r.issues&reportTiny != 0 && r.issues&reportCycle != 0 {
					if r.sp.kind == _KindSpecialFinalizer {
						println("  may be in the same tiny block as finalizer")
					} else {
						println("  may be in the same tiny block as cleanup or cleanup argument")
					}
				}
			}
			println()

			println("Has", , "at", hex(uintptr(unsafe.Pointer(.))))
			 := findfunc(.funcPC)
			if .valid() {
				,  := funcline(, .funcPC)
				print("  ", funcname(), "()\n")
				print("      ", , ":", , " +", hex(.funcPC-.entry()), "\n")
			} else {
				print("  <bad pc ", hex(.funcPC), ">\n")
			}

			println("created at: ")
			 := findfunc(.createPC)
			if .valid() {
				,  := funcline(, .createPC)
				print("  ", funcname(), "()\n")
				print("      ", , ":", , " +", hex(.createPC-.entry()), "\n")
			} else {
				print("  <bad pc ", hex(.createPC), ">\n")
			}

			lastPtr = r.ptr
		}
		println()
		if more {
			println("... too many potential issues ...")
		}
		throw("detected possible issues with cleanups and/or finalizers")
	}
}

// forEachSpecial is an iterator over all specials.
//
// Used by debug.checkfinalizers.
//
// The world must be stopped.
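//
// A usage sketch (returning false from the callback stops the iteration
// early):
//
//	forEachSpecial(func(p uintptr, s *mspan, sp *special) bool {
//		if sp.kind == _KindSpecialFinalizer {
//			// p is the address of the object the finalizer is attached to.
//		}
//		return true
//	})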
func forEachSpecial(yield func(p uintptr, s *mspan, sp *special) bool) {
	assertWorldStopped()

	// Iterate over all heap arenas that might contain specials.
	for _, ai := range mheap_.markArenas {
		ha := mheap_.arenas[ai.l1()][ai.l2()]

		// Iterate over the arena's page specials bitmap.
		for i := range ha.pageSpecials[:] {
			// Find set bits, which correspond to spans with specials.
			specials := atomic.Load8(&ha.pageSpecials[i])
			if specials == 0 {
				continue
			}
			for j := uint(0); j < 8; j++ {
				if specials&(1<<j) == 0 {
					continue
				}
				// Find the span for this bit.
				//
				// This value is guaranteed to be non-nil because having
				// specials implies that the span is in-use, and since the
				// world is stopped we don't have to worry about the span
				// being freed and re-used.
				s := ha.spans[uint(i)*8+j]

				// The world is stopped, so no special can be added to or
				// removed from the list while we're traversing it.
				for sp := s.specials; sp != nil; sp = sp.next {
					if !yield(s.base()+sp.offset, s, sp) {
						return
					}
				}
			}
		}
	}
}