github.com/iDigitalFlame/xmt@v0.5.4/device/winapi/mem_v14.go (about)

     1  //go:build windows && cgo && freemem && go1.14 && !go1.15
     2  // +build windows,cgo,freemem,go1.14,!go1.15
     3  
     4  // Copyright (C) 2020 - 2023 iDigitalFlame
     5  //
     6  // This program is free software: you can redistribute it and/or modify
     7  // it under the terms of the GNU General Public License as published by
     8  // the Free Software Foundation, either version 3 of the License, or
     9  // any later version.
    10  //
    11  // This program is distributed in the hope that it will be useful,
    12  // but WITHOUT ANY WARRANTY; without even the implied warranty of
    13  // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    14  // GNU General Public License for more details.
    15  //
    16  // You should have received a copy of the GNU General Public License
    17  // along with this program.  If not, see <https://www.gnu.org/licenses/>.
    18  //
    19  
    20  package winapi
    21  
    22  import "unsafe"
    23  
// Layout-selection constants that mirror internal sizing values of the
// Go 1.14 runtime (this file only builds under "go1.14 && !go1.15").
// These MUST match the runtime's own constants exactly, because the structs
// below are overlaid directly onto live runtime memory.
const (
	// x64 evaluates to 1 on targets where uintptr is 64 bits wide and to 0
	// on 32-bit targets; it selects between the two runtime layouts below.
	x64 = 1 << (^uintptr(0) >> 63) / 2
	// summarySize is the number of page-allocator summary levels:
	// 5 on 64-bit, 4 on 32-bit.
	// NOTE(review): presumably mirrors runtime "summaryLevels" in
	// mpagealloc.go — confirm against the go1.14 runtime source.
	summarySize = 4 + (1 * x64)
	// chunkL1Bits/chunkL2Bits size the two levels of the page-allocator
	// chunks index (mheap.pages.chunks below). The expressions encode the
	// runtime's heap address-space width (48-bit on 64-bit targets, 32-bit
	// otherwise) minus the chunk shift.
	chunkL1Bits = 1 << (13 * x64)
	chunkL2Bits = 1 << ((x64*48 + (1-x64)*32) - 22 - (13 * x64))
	// arenaL1Bits/arenaL2Bits size the two levels of the heap arena index
	// (mheap.arenas below), using the same address-space arithmetic.
	arenaL1Bits = 1 << (6 * x64)
	arenaL2Bits = 1 << ((x64*48 + (1-x64)*32) - 22 - (6 * x64))
)
    32  
// gcBitsArenas is bound by the linker to the private runtime symbol
// "runtime.gcBitsArenas", which holds the GC mark-bits arena pointers.
// Index 0 is skipped by the enumeration code in this file.
//
//go:linkname gcBitsArenas runtime.gcBitsArenas
var gcBitsArenas [4]uintptr
    35  
// mheap is a partial, layout-compatible mirror of the Go 1.14 runtime.mheap
// struct. Only the fields this package dereferences are named; every "_"
// entry is opaque padding standing in for a runtime field we never read.
// Field order, types and sizes MUST stay in lock-step with the runtime,
// since a *mheap here is overlaid on the real runtime heap structure.
type mheap struct {
	_ uintptr // NOTE(review): presumably the runtime's "lock mutex" — confirm.
	// pages mirrors the runtime page allocator state (runtime pageAlloc).
	pages struct {
		summary [summarySize][]uint64                  // per-level summary slices
		chunks  [chunkL1Bits]*[chunkL2Bits][16]uint64  // sparse chunk bitmap index
		_, _, _ uintptr
		_, _    uint
		// inUse tracks the address ranges currently owned by the page allocator.
		inUse struct {
			ranges [][2]uintptr // [start, end] pairs; start of 0 means unused
			_      uintptr
		}
		_, _ uintptr
		_    bool
	}
	_, _, _  uint32
	allspans []*mspan // all spans ever created (backing array is heap memory)
	_        [2]struct {
		_, _, _, _ uintptr
		_          uint32
	}
	_, _, _, _     uint64
	_              float64
	_, _           uint64
	_              uintptr
	_, _, _, _     uint64
	_              [67]uint64
	arenas         [arenaL1Bits]*[arenaL2Bits]uintptr // two-level heap arena map
	heapArenaAlloc linearAlloc                        // pre-reserved arena metadata space
	arenaHints     *arenaHint                         // linked list of arena placement hints
	area           linearAlloc                        // pre-reserved arena space
	_, _           []uint
	base           uintptr
	_              uintptr
	_              uint32
	// padding mirroring the runtime's per-sizeclass central free lists;
	// the cacheLineSize/ptrSize constants are declared elsewhere in this package.
	_ [134]struct {
		_          uintptr
		_          uint8
		_, _, _, _ uintptr
		_          uint64
		_          [cacheLineSize - ((5*ptrSize)+9)%cacheLineSize]byte
	}
	spanalloc             fixalloc // allocator for span structs
	cachealloc            fixalloc // allocator for mcache structs
	specialfinalizeralloc fixalloc // allocator for finalizer records
	specialprofilealloc   fixalloc // allocator for profile records
	_                     uintptr
	arenaHintAlloc        fixalloc // allocator for arenaHint structs
}
// mspan is a minimal mirror of the head of the Go 1.14 runtime.mspan struct;
// only startAddr is read. The two leading pointers match the runtime's
// next/prev list links, followed by one padded pointer-sized field.
type mspan struct {
	_, _      *mspan
	_         uintptr
	startAddr uintptr // base address of the span's memory
}
// fixalloc is a layout mirror of the Go 1.14 runtime.fixalloc fixed-size
// allocator; only the current chunk pointer and the in-use byte count are
// read by this package.
type fixalloc struct {
	_, _, _, _ uintptr
	chunk      uintptr // current allocation chunk (address of backing memory)
	_          uint32
	inuse      uintptr // bytes currently in use
	_          uintptr
	_          bool
}
// arenaHint is a layout mirror of the Go 1.14 runtime.arenaHint, a node in
// the linked list of addresses where the runtime will try to grow the heap.
type arenaHint struct {
	addr uintptr    // hinted arena base address
	_    bool       // padding for the runtime's "down" flag
	next *arenaHint // next hint in the list, nil at the end
}
// linearAlloc is a layout mirror of the Go 1.14 runtime.linearAlloc, a
// simple bump allocator over a pre-reserved memory region.
type linearAlloc struct {
	next      uintptr // next free byte
	mapped, _ uintptr // one past the end of mapped space; padded "end" field
}
   106  
   107  func enumRuntimeMemory(h *mheap, m memoryMap) {
   108  	for i := range h.pages.summary {
   109  		m.add(uintptr(unsafe.Pointer(&h.pages.summary[i][0])))
   110  	}
   111  	for i := range h.pages.chunks {
   112  		if h.pages.chunks[i] == nil {
   113  			continue
   114  		}
   115  		m.add(uintptr(unsafe.Pointer(&h.pages.chunks[i])))
   116  	}
   117  	for i := range h.pages.inUse.ranges {
   118  		if h.pages.inUse.ranges[i][0] == 0 {
   119  			continue
   120  		}
   121  		m.add(h.pages.inUse.ranges[i][0])
   122  	}
   123  	for i := 1; i < len(gcBitsArenas); i++ {
   124  		m.add(gcBitsArenas[i])
   125  	}
   126  	if len(h.allspans) > 0 {
   127  		for i := range h.allspans {
   128  			if h.allspans[i] != nil {
   129  				m.add(h.allspans[i].startAddr)
   130  			}
   131  		}
   132  		m.add(uintptr(unsafe.Pointer(&h.allspans[0])))
   133  	}
   134  	for i := range h.arenas {
   135  		if h.arenas[i] == nil {
   136  			continue
   137  		}
   138  		if m.add(uintptr(unsafe.Pointer(h.arenas[i]))); x64 == 0 {
   139  			continue
   140  		}
   141  		for z := range h.arenas[i] {
   142  			if h.arenas[i][z] == 0 {
   143  				continue
   144  			}
   145  			m.add(uintptr(unsafe.Pointer(h.arenas[i][z])))
   146  		}
   147  	}
   148  	if m.add(h.area.next); h.area.mapped > 2 {
   149  		m.add(h.area.mapped - 2)
   150  	}
   151  	if m.add(h.heapArenaAlloc.next); h.heapArenaAlloc.mapped > 2 {
   152  		m.add(h.heapArenaAlloc.mapped - 2)
   153  	}
   154  	for x := h.arenaHints; x != nil; x = x.next {
   155  		m.add(x.addr)
   156  	}
   157  	m.add(h.base)
   158  	m.add(h.spanalloc.chunk)
   159  	m.add(h.spanalloc.inuse)
   160  	m.add(h.cachealloc.chunk)
   161  	m.add(h.cachealloc.inuse)
   162  	m.add(h.specialfinalizeralloc.chunk)
   163  	m.add(h.specialfinalizeralloc.inuse)
   164  	m.add(h.specialprofilealloc.chunk)
   165  	m.add(h.specialprofilealloc.inuse)
   166  	m.add(h.arenaHintAlloc.chunk)
   167  	m.add(h.arenaHintAlloc.inuse)
   168  }