github.com/iDigitalFlame/xmt@v0.5.4/device/winapi/mem_v20.go (about)

     1  //go:build windows && cgo && freemem && go1.20
     2  // +build windows,cgo,freemem,go1.20
     3  
     4  // Copyright (C) 2020 - 2023 iDigitalFlame
     5  //
     6  // This program is free software: you can redistribute it and/or modify
     7  // it under the terms of the GNU General Public License as published by
     8  // the Free Software Foundation, either version 3 of the License, or
     9  // any later version.
    10  //
    11  // This program is distributed in the hope that it will be useful,
    12  // but WITHOUT ANY WARRANTY; without even the implied warranty of
    13  // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    14  // GNU General Public License for more details.
    15  //
    16  // You should have received a copy of the GNU General Public License
    17  // along with this program.  If not, see <https://www.gnu.org/licenses/>.
    18  //
    19  
    20  package winapi
    21  
    22  import "unsafe"
    23  
const (
	// x64 is 1 on a 64-bit target and 0 on a 32-bit target:
	// ^uintptr(0)>>63 is 1 only when uintptr is 64 bits wide, so the
	// expression evaluates to (1<<1)/2 = 1 there and (1<<0)/2 = 0 on 32-bit.
	x64 = 1 << (^uintptr(0) >> 63) / 2
	// summarySize is the number of pageAlloc summary levels: 5 on 64-bit,
	// 4 on 32-bit. NOTE(review): these mirror Go 1.20 runtime internals
	// (see the go1.20 build tag) and must be re-checked per Go release.
	summarySize = 4 + (1 * x64)
	// chunkL1Bits/chunkL2Bits size the two-level page-allocator chunks
	// index; the exponents derive from the heap address width (48 bits on
	// 64-bit, 32 on 32-bit) minus the 22-bit chunk size, split across the
	// two levels (13 L1 bits on 64-bit, 0 on 32-bit).
	chunkL1Bits = 1 << (13 * x64)
	chunkL2Bits = 1 << ((x64*48 + (1-x64)*32) - 22 - (13 * x64))
	// arenaL1Bits/arenaL2Bits size the two-level heap arena map the same
	// way, with 6 L1 bits on 64-bit Windows and a 22-bit (4 MiB) arena size.
	arenaL1Bits = 1 << (6 * x64)
	arenaL2Bits = 1 << ((x64*48 + (1-x64)*32) - 22 - (6 * x64))
)
    32  
// gcBitsArenas is linked directly to the unexported runtime.gcBitsArenas
// variable. The runtime declares it as a small struct (a lock followed by
// four gcBitsArena pointers); it is mirrored here as five word-sized slots
// so the arena pointers can be read without the runtime types.
// NOTE(review): callers skip index 0, which holds the lock word — confirm
// against the runtime source for each supported Go release.
//
//go:linkname gcBitsArenas runtime.gcBitsArenas
var gcBitsArenas [5]uintptr
    35  
// mheap mirrors the in-memory layout of the unexported runtime.mheap type
// for Go 1.20 (this file is gated by the go1.20 build tag). Only the fields
// this package reads are named; every blank (_) field is padding that stands
// in for runtime fields that are not touched, purely to keep the named
// fields at the correct byte offsets.
//
// NOTE(review): this layout must be re-verified against the runtime source
// whenever a new Go version is supported — any upstream field change shifts
// every offset below.
type mheap struct {
	_     uintptr // heap lock (runtime.mutex) — presumably one word here
	pages struct { // mirrors runtime.pageAlloc
		// summary holds one slice per radix-tree level (see summarySize).
		summary [summarySize][]uint64
		// chunks is the two-level index of per-chunk allocation bitmaps;
		// [16]uint64 stands in for runtime.pallocData (two 512-bit sets).
		chunks [chunkL1Bits]*[chunkL2Bits][16]uint64
		_      uintptr
		_, _   uint
		inUse  struct {
			// ranges lists the [base, limit) address ranges in use.
			ranges [][2]uintptr
			_, _   uintptr
		}
		scav struct {
			_ int64
			// chunks is the scavenger's index bitmap backing store.
			chunks  []uint8
			_, _, _ int32
			_       uintptr
		}
		_, _, _ uintptr
		_       bool
	}
	_        uint32
	allspans []*mspan // all spans ever created (runtime: h.allspans)
	_        uintptr
	_, _, _  uint64
	_        float64
	_        uint64
	_        uintptr
	// arenas is the two-level map of heap arena metadata pointers.
	arenas         [arenaL1Bits]*[arenaL2Bits]uintptr
	heapArenaAlloc linearAlloc // pre-reserved space for arena metadata
	arenaHints     *arenaHint  // linked list of places to grow the arenas
	area           linearAlloc // pre-reserved space for the heap itself
	_, _, _        []uint
	base           uintptr
	_              uintptr
	// Padding for the per-size-class central free lists (runtime: central,
	// 136 = numSpanClasses entries, each cache-line padded).
	_ [136]struct {
		_ uint8
		_ [4]struct {
			_, _, _, _ uintptr
			_          uint64
		}
		_ uint64
		_ [cacheLineSize - (1+(2*((4*ptrSize)+8)))%cacheLineSize]byte
	}
	// Fixed-size allocators for runtime bookkeeping structures; only their
	// chunk/inuse words are read (see enumRuntimeMemory).
	spanalloc             fixalloc
	cachealloc            fixalloc
	specialfinalizeralloc fixalloc
	specialprofilealloc   fixalloc
	specialReachableAlloc fixalloc
	_                     uintptr
	arenaHintAlloc        fixalloc
}
// mspan mirrors the leading fields of runtime.mspan; only startAddr is read.
// NOTE(review): offsets assume the Go 1.20 layout (next/prev list links,
// one pointer-sized field, then the span base address).
type mspan struct {
	_, _      *mspan  // next, prev in the span linked list
	_         uintptr // owning list pointer
	startAddr uintptr // address of the first byte of the span
}
// fixalloc mirrors runtime.fixalloc, the fixed-size off-heap allocator used
// for runtime bookkeeping records. Only chunk (the current allocation block)
// and inuse (byte count / pointer per the mirrored layout) are read here.
// NOTE(review): padding assumes the Go 1.20 field order — verify per release.
type fixalloc struct {
	_, _, _, _ uintptr // size, first, arg, free list
	chunk      uintptr // pointer to the current chunk being carved up
	_, _       uint32
	inuse      uintptr
	_          uintptr
	_          bool
}
// arenaHint mirrors runtime.arenaHint, a node in the singly-linked list of
// candidate addresses at which the runtime may reserve new heap arenas.
type arenaHint struct {
	addr uintptr    // hinted reservation address
	_    bool       // grow-down flag (unused here)
	next *arenaHint // next hint in the list, nil at the end
}
// linearAlloc mirrors runtime.linearAlloc, a simple bump allocator over a
// pre-reserved address range. Only next and mapped are read here.
type linearAlloc struct {
	next      uintptr // next free byte
	mapped, _ uintptr // one byte past the mapped region; then end
	_         bool
}
   110  
   111  func enumRuntimeMemory(h *mheap, m memoryMap) {
   112  	m.add(uintptr(unsafe.Pointer(&h.pages.scav.chunks[0])))
   113  	for i := range h.pages.summary {
   114  		m.add(uintptr(unsafe.Pointer(&h.pages.summary[i][0])))
   115  	}
   116  	for i := range h.pages.chunks {
   117  		if h.pages.chunks[i] == nil {
   118  			continue
   119  		}
   120  		m.add(uintptr(unsafe.Pointer(&h.pages.chunks[i])))
   121  	}
   122  	for i := range h.pages.inUse.ranges {
   123  		if h.pages.inUse.ranges[i][0] == 0 {
   124  			continue
   125  		}
   126  		m.add(h.pages.inUse.ranges[i][0])
   127  	}
   128  	for i := 1; i < len(gcBitsArenas); i++ {
   129  		m.add(gcBitsArenas[i])
   130  	}
   131  	if len(h.allspans) > 0 {
   132  		for i := range h.allspans {
   133  			if h.allspans[i] != nil {
   134  				m.add(h.allspans[i].startAddr)
   135  			}
   136  		}
   137  		m.add(uintptr(unsafe.Pointer(&h.allspans[0])))
   138  	}
   139  	for i := range h.arenas {
   140  		if h.arenas[i] == nil {
   141  			continue
   142  		}
   143  		if m.add(uintptr(unsafe.Pointer(h.arenas[i]))); x64 == 0 {
   144  			continue
   145  		}
   146  		for z := range h.arenas[i] {
   147  			if h.arenas[i][z] == 0 {
   148  				continue
   149  			}
   150  			m.add(uintptr(unsafe.Pointer(h.arenas[i][z])))
   151  		}
   152  	}
   153  	if m.add(h.area.next); h.area.mapped > 2 {
   154  		m.add(h.area.mapped - 2)
   155  	}
   156  	if m.add(h.heapArenaAlloc.next); h.heapArenaAlloc.mapped > 2 {
   157  		m.add(h.heapArenaAlloc.mapped - 2)
   158  	}
   159  	for x := h.arenaHints; x != nil; x = x.next {
   160  		m.add(x.addr)
   161  	}
   162  	m.add(h.base)
   163  	m.add(h.spanalloc.chunk)
   164  	m.add(h.spanalloc.inuse)
   165  	m.add(h.cachealloc.chunk)
   166  	m.add(h.cachealloc.inuse)
   167  	m.add(h.specialfinalizeralloc.chunk)
   168  	m.add(h.specialfinalizeralloc.inuse)
   169  	m.add(h.specialprofilealloc.chunk)
   170  	m.add(h.specialprofilealloc.inuse)
   171  	m.add(h.specialReachableAlloc.chunk)
   172  	m.add(h.specialReachableAlloc.inuse)
   173  	m.add(h.arenaHintAlloc.chunk)
   174  	m.add(h.arenaHintAlloc.inuse)
   175  }