github.com/iDigitalFlame/xmt@v0.5.4/device/winapi/mem_v19.go (about)

     1  //go:build windows && cgo && freemem && go1.19 && !go1.20
     2  // +build windows,cgo,freemem,go1.19,!go1.20
     3  
     4  // Copyright (C) 2020 - 2023 iDigitalFlame
     5  //
     6  // This program is free software: you can redistribute it and/or modify
     7  // it under the terms of the GNU General Public License as published by
     8  // the Free Software Foundation, either version 3 of the License, or
     9  // any later version.
    10  //
    11  // This program is distributed in the hope that it will be useful,
    12  // but WITHOUT ANY WARRANTY; without even the implied warranty of
    13  // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    14  // GNU General Public License for more details.
    15  //
    16  // You should have received a copy of the GNU General Public License
    17  // along with this program.  If not, see <https://www.gnu.org/licenses/>.
    18  //
    19  
    20  package winapi
    21  
    22  import "unsafe"
    23  
const (
	// x64 is 1 on 64-bit targets and 0 on 32-bit targets: ^uintptr(0)>>63
	// is 1 only when uintptr is 64 bits wide, so the expression evaluates
	// to (1<<1)/2 = 1 (64-bit) or (1<<0)/2 = 0 (32-bit).
	x64 = 1 << (^uintptr(0) >> 63) / 2
	// summarySize is the number of page-allocator summary levels: 5 on
	// 64-bit, 4 on 32-bit. NOTE(review): assumed to mirror the go1.19
	// runtime's summaryLevels (this file is build-tag pinned to go1.19);
	// confirm against that release before changing.
	summarySize = 4 + (1 * x64)
	// In the expressions below, (x64*48 + (1-x64)*32) is the runtime's
	// heapAddrBits (48 on 64-bit, 32 on 32-bit) and 22 is the log2 of the
	// pallocChunk span. These four constants size the two-level chunk and
	// arena index arrays in the mheap mirror below — do not alter them
	// independently of the struct definitions.
	// NOTE(review): mirrors go1.19 runtime mpagealloc.go/malloc.go — confirm.
	chunkL1Bits = 1 << (13 * x64)
	chunkL2Bits = 1 << ((x64*48 + (1-x64)*32) - 22 - (13 * x64))
	arenaL1Bits = 1 << (6 * x64)
	arenaL2Bits = 1 << ((x64*48 + (1-x64)*32) - 22 - (6 * x64))
)
    32  
// gcBitsArenas is linked against the runtime's GC bitmap arena list so that
// enumRuntimeMemory can report those allocations. It is treated as four raw
// words; index 0 is skipped by the enumerator (presumably the embedded lock
// word — TODO confirm against the go1.19 runtime definition).
//
//go:linkname gcBitsArenas runtime.gcBitsArenas
var gcBitsArenas [4]uintptr
    35  
// mheap is a hand-built mirror of the go1.19 runtime.mheap layout (the build
// tags pin this file to go1.19 && !go1.20 because the layout changes between
// releases). Only the named fields are read by enumRuntimeMemory; every blank
// field is padding that must preserve the exact size and order of the real
// runtime struct. NOTE(review): field-by-field correspondence is assumed from
// the go1.19 runtime sources — verify against runtime/mheap.go for that
// release before modifying anything here.
type mheap struct {
	_     uintptr // leading word (runtime lock — TODO confirm)
	pages struct { // mirrors the runtime page allocator (pageAlloc)
		summary [summarySize][]uint64
		chunks  [chunkL1Bits]*[chunkL2Bits][16]uint64
		_       uintptr
		_, _    uint
		inUse   struct { // address ranges currently owned by the heap
			ranges [][2]uintptr
			_, _   uintptr
		}
		_    uint32
		scav struct { // scavenger state; only the chunks slice is read
			_       int64
			chunks  []uint8
			_, _, _ int32
			_       uintptr
			_       uint32
			_       int64
		}
		_, _, _ uintptr
		_       bool
	}
	_              uint32
	allspans       []*mspan // every span descriptor ever created
	_, _, _, _     uint64
	_              float64
	_              uint64
	_              uintptr
	arenas         [arenaL1Bits]*[arenaL2Bits]uintptr // two-level heap arena map
	heapArenaAlloc linearAlloc
	arenaHints     *arenaHint
	area           linearAlloc
	_, _, _        []uint
	base           uintptr
	_              uintptr
	_              uint32
	_              [136]struct { // per-size-class central lists (padding only)
		_ uint8
		_ [4]struct {
			_, _, _, _ uintptr
			_          uint64
		}
		_ uint64
		_ [cacheLineSize - (1+(2*((4*ptrSize)+8)))%cacheLineSize]byte
	}
	// Fixed-size allocators; their chunk/inuse words are enumerated below.
	spanalloc             fixalloc
	cachealloc            fixalloc
	specialfinalizeralloc fixalloc
	specialprofilealloc   fixalloc
	specialReachableAlloc fixalloc
	_                     uintptr
	arenaHintAlloc        fixalloc
}
// mspan mirrors the leading fields of the runtime's span descriptor; only
// startAddr (the span's base address) is read, the blank fields are layout
// padding (presumably the runtime's list links — TODO confirm for go1.19).
type mspan struct {
	_, _      *mspan
	_         uintptr
	startAddr uintptr // base address of the span's memory
}
// fixalloc mirrors the runtime's fixed-size allocator. Only chunk and inuse
// are read. NOTE(review): in the go1.19 runtime, chunk points at the current
// allocation chunk while inuse counts in-use bytes rather than holding an
// address — confirm that feeding inuse to memoryMap.add is intentional.
type fixalloc struct {
	_, _, _, _ uintptr
	chunk      uintptr // current allocation chunk
	_, _       uint32
	inuse      uintptr
	_          uintptr
	_          bool
}
// arenaHint mirrors the runtime's arena growth hint: a singly linked list of
// candidate addresses for future heap arena reservations.
type arenaHint struct {
	addr uintptr    // hinted reservation address
	_    bool       // padding (direction flag in the runtime)
	next *arenaHint // next hint in the list, nil at the end
}
// linearAlloc mirrors the runtime's simple bump allocator. next is the next
// free byte; mapped marks the extent of the mapped region (the enumerator
// below subtracts 2 before reporting it — see enumRuntimeMemory).
type linearAlloc struct {
	next      uintptr
	mapped, _ uintptr
	_         bool
}
   113  
   114  func enumRuntimeMemory(h *mheap, m memoryMap) {
   115  	m.add(uintptr(unsafe.Pointer(&h.pages.scav.chunks[0])))
   116  	for i := range h.pages.summary {
   117  		m.add(uintptr(unsafe.Pointer(&h.pages.summary[i][0])))
   118  	}
   119  	for i := range h.pages.chunks {
   120  		if h.pages.chunks[i] == nil {
   121  			continue
   122  		}
   123  		m.add(uintptr(unsafe.Pointer(&h.pages.chunks[i])))
   124  	}
   125  	for i := range h.pages.inUse.ranges {
   126  		if h.pages.inUse.ranges[i][0] == 0 {
   127  			continue
   128  		}
   129  		m.add(h.pages.inUse.ranges[i][0])
   130  	}
   131  	for i := 1; i < len(gcBitsArenas); i++ {
   132  		m.add(gcBitsArenas[i])
   133  	}
   134  	if len(h.allspans) > 0 {
   135  		for i := range h.allspans {
   136  			if h.allspans[i] != nil {
   137  				m.add(h.allspans[i].startAddr)
   138  			}
   139  		}
   140  		m.add(uintptr(unsafe.Pointer(&h.allspans[0])))
   141  	}
   142  	for i := range h.arenas {
   143  		if h.arenas[i] == nil {
   144  			continue
   145  		}
   146  		if m.add(uintptr(unsafe.Pointer(h.arenas[i]))); x64 == 0 {
   147  			continue
   148  		}
   149  		for z := range h.arenas[i] {
   150  			if h.arenas[i][z] == 0 {
   151  				continue
   152  			}
   153  			m.add(uintptr(unsafe.Pointer(h.arenas[i][z])))
   154  		}
   155  	}
   156  	if m.add(h.area.next); h.area.mapped > 2 {
   157  		m.add(h.area.mapped - 2)
   158  	}
   159  	if m.add(h.heapArenaAlloc.next); h.heapArenaAlloc.mapped > 2 {
   160  		m.add(h.heapArenaAlloc.mapped - 2)
   161  	}
   162  	for x := h.arenaHints; x != nil; x = x.next {
   163  		m.add(x.addr)
   164  	}
   165  	m.add(h.base)
   166  	m.add(h.spanalloc.chunk)
   167  	m.add(h.spanalloc.inuse)
   168  	m.add(h.cachealloc.chunk)
   169  	m.add(h.cachealloc.inuse)
   170  	m.add(h.specialfinalizeralloc.chunk)
   171  	m.add(h.specialfinalizeralloc.inuse)
   172  	m.add(h.specialprofilealloc.chunk)
   173  	m.add(h.specialprofilealloc.inuse)
   174  	m.add(h.specialReachableAlloc.chunk)
   175  	m.add(h.specialReachableAlloc.inuse)
   176  	m.add(h.arenaHintAlloc.chunk)
   177  	m.add(h.arenaHintAlloc.inuse)
   178  }