github.com/iDigitalFlame/xmt@v0.5.4/device/winapi/mem_v16.go

//go:build windows && cgo && freemem && go1.16 && !go1.17
// +build windows,cgo,freemem,go1.16,!go1.17

// Copyright (C) 2020 - 2023 iDigitalFlame
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program.  If not, see <https://www.gnu.org/licenses/>.
//

package winapi

import "unsafe"

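// Heap-layout constants mirroring the Go 1.16 runtime's page-allocator and
// arena index sizing on Windows. x64 evaluates to 1 on 64-bit targets and 0
// on 32-bit targets, so the remaining values collapse to the runtime's
// summary-level count and its L1/L2 chunk and arena index widths for the
// target pointer size (48-bit heap addresses on 64-bit, 32-bit otherwise).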
const (
	x64         = 1 << (^uintptr(0) >> 63) / 2
	summarySize = 4 + (1 * x64)
	chunkL1Bits = 1 << (13 * x64)
	chunkL2Bits = 1 << ((x64*48 + (1-x64)*32) - 22 - (13 * x64))
	arenaL1Bits = 1 << (6 * x64)
	arenaL2Bits = 1 << ((x64*48 + (1-x64)*32) - 22 - (6 * x64))
)

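// gcBitsArenas aliases the runtime's gcBitsArenas value (the GC bitmap arena
// lists), viewed here as four pointer-sized slots. Slot 0 holds the runtime's
// lock and is skipped during enumeration; the remaining slots are list heads
// whose addresses are reported to the memory map.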
//go:linkname gcBitsArenas runtime.gcBitsArenas
var gcBitsArenas [4]uintptr

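// mheap mirrors the in-memory layout of the Go 1.16 runtime's mheap type on
// Windows. Only the fields read by enumRuntimeMemory are named; every blank
// ("_") field is padding that keeps the named fields at the same offsets as
// their runtime counterparts. The layout is version specific, which is why
// this file is limited to go1.16 by its build constraints.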
type mheap struct {
	_     uintptr
	pages struct { // Mirrors the runtime page allocator (pageAlloc).
		summary [summarySize][]uint64
		chunks  [chunkL1Bits]*[chunkL2Bits][16]uint64
		_       uintptr
		_, _    uint
		inUse   struct {
			ranges [][2]uintptr
			_, _   uintptr
		}
		scav struct {
			_ struct {
				_    [][2]uintptr
				_, _ uintptr
			}
			_          uint32
			_, _, _, _ uintptr
		}
		_, _ uintptr
		_    bool
	}
	_, _, _        uint32
	allspans       []*mspan // All spans ever created, as tracked by the runtime.
	_              uint32
	_, _, _, _     uint64
	_              float64
	_, _           uint64
	_              uintptr
	arenas         [arenaL1Bits]*[arenaL2Bits]uintptr // Heap arena map; heapArena pointers kept as uintptr.
	heapArenaAlloc linearAlloc                        // Pre-reserved space for heapArena metadata.
	arenaHints     *arenaHint                         // Addresses to try for new arenas.
	area           linearAlloc                        // The runtime's "arena" linearAlloc.
	_, _, _        []uint
	base           uintptr // Current arena base (curArena.base in the runtime).
	_              uintptr
	_              uint32
	_              [136]struct {
		_ uint8
		_ [4]struct {
			_, _, _, _ uintptr
			_          uint64
		}
		_ uint64
		_ [cacheLineSize - (1+(2*((4*ptrSize)+8)))%cacheLineSize]byte
	}
	// Fixed-size allocators for runtime metadata; their chunk and inuse
	// values are read by enumRuntimeMemory below.
	spanalloc             fixalloc
	cachealloc            fixalloc
	specialfinalizeralloc fixalloc
	specialprofilealloc   fixalloc
	_                     uintptr
	arenaHintAlloc        fixalloc
}
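// mspan mirrors the head of the runtime's mspan type. Only startAddr, the
// first byte of the span, is read; the leading blank fields pad over the list
// pointers that precede it.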
type mspan struct {
	_, _      *mspan
	_         uintptr
	startAddr uintptr
}
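// fixalloc mirrors the runtime's fixed-size allocator of the same name. The
// chunk field is the allocator's current chunk pointer and inuse mirrors the
// runtime's in-use counter; both values are handed to the memory map below.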
type fixalloc struct {
	_, _, _, _ uintptr
	chunk      uintptr
	_          uint32
	inuse      uintptr
	_          uintptr
	_          bool
}
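// arenaHint mirrors the runtime's arenaHint, a linked list of candidate
// addresses at which the heap may grow.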
type arenaHint struct {
	addr uintptr
	_    bool
	next *arenaHint
}
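// linearAlloc mirrors the runtime's linearAlloc, a simple bump allocator over
// a reserved region: next is the next free byte and mapped is one byte past
// the end of the mapped space.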
type linearAlloc struct {
	next      uintptr
	mapped, _ uintptr
}

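// enumRuntimeMemory walks the mirrored runtime heap state in h and reports
// the base address of every runtime-owned allocation it can reach to the
// supplied memoryMap. The caller is expected to pass the live runtime mheap
// (presumably obtained via a go:linkname declaration elsewhere in this
// package, e.g. to runtime.mheap_) so these regions can be accounted for by
// the package's "freemem" support; that wiring is an assumption and is not
// shown in this file.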
func enumRuntimeMemory(h *mheap, m memoryMap) {
	// Backing arrays of each page-allocator summary level.
	for i := range h.pages.summary {
		m.add(uintptr(unsafe.Pointer(&h.pages.summary[i][0])))
	}
	// Populated entries of the page-allocator chunk map.
	for i := range h.pages.chunks {
		if h.pages.chunks[i] == nil {
			continue
		}
		m.add(uintptr(unsafe.Pointer(&h.pages.chunks[i])))
	}
	// Start of each address range currently in use by the heap.
	for i := range h.pages.inUse.ranges {
		if h.pages.inUse.ranges[i][0] == 0 {
			continue
		}
		m.add(h.pages.inUse.ranges[i][0])
	}
	// GC bitmap arena lists; slot 0 is the runtime's lock and is skipped.
	for i := 1; i < len(gcBitsArenas); i++ {
		m.add(gcBitsArenas[i])
	}
	// Every span's start address, plus the allspans backing array itself.
	if len(h.allspans) > 0 {
		for i := range h.allspans {
			if h.allspans[i] != nil {
				m.add(h.allspans[i].startAddr)
			}
		}
		m.add(uintptr(unsafe.Pointer(&h.allspans[0])))
	}
	// Heap arena map: each L2 array and, on 64-bit builds, every populated
	// arena entry (on 32-bit builds x64 is 0 and the inner walk is skipped).
	for i := range h.arenas {
		if h.arenas[i] == nil {
			continue
		}
		if m.add(uintptr(unsafe.Pointer(h.arenas[i]))); x64 == 0 {
			continue
		}
		for z := range h.arenas[i] {
			if h.arenas[i][z] == 0 {
				continue
			}
			m.add(uintptr(unsafe.Pointer(h.arenas[i][z])))
		}
	}
	// Linear allocators: record the next free byte and, when anything is
	// mapped, an address just inside the end of the mapped space.
	if m.add(h.area.next); h.area.mapped > 2 {
		m.add(h.area.mapped - 2)
	}
	if m.add(h.heapArenaAlloc.next); h.heapArenaAlloc.mapped > 2 {
		m.add(h.heapArenaAlloc.mapped - 2)
	}
	// Arena growth hints.
	for x := h.arenaHints; x != nil; x = x.next {
		m.add(x.addr)
	}
	// Current arena base and the fixed-size allocator chunk/in-use values.
	m.add(h.base)
	m.add(h.spanalloc.chunk)
	m.add(h.spanalloc.inuse)
	m.add(h.cachealloc.chunk)
	m.add(h.cachealloc.inuse)
	m.add(h.specialfinalizeralloc.chunk)
	m.add(h.specialfinalizeralloc.inuse)
	m.add(h.specialprofilealloc.chunk)
	m.add(h.specialprofilealloc.inuse)
	m.add(h.arenaHintAlloc.chunk)
	m.add(h.arenaHintAlloc.inuse)
}