gioui.org@v0.6.1-0.20240506124620-7a9ce51988ce/gpu/internal/vulkan/vulkan.go

// SPDX-License-Identifier: Unlicense OR MIT

//go:build (linux || freebsd) && !novulkan
// +build linux freebsd
// +build !novulkan

package vulkan

import (
	"errors"
	"fmt"
	"image"
	"math/bits"

	"gioui.org/gpu/internal/driver"
	"gioui.org/internal/vk"
	"gioui.org/shader"
)

type Backend struct {
	physDev vk.PhysicalDevice
	dev     vk.Device
	queue   vk.Queue
	cmdPool struct {
		current vk.CommandBuffer
		pool    vk.CommandPool
		used    int
		buffers []vk.CommandBuffer
	}
	outFormat vk.Format
	staging   struct {
		buf  *Buffer
		mem  []byte
		size int
		cap  int
	}
	defers     []func(d vk.Device)
	frameSig   vk.Semaphore
	frameFence vk.Fence
	waitSems   []vk.Semaphore
	waitStages []vk.PipelineStageFlags
	sigSems    []vk.Semaphore
	fence      vk.Fence

	allPipes []*Pipeline

	pipe *Pipeline

	passes map[passKey]vk.RenderPass

	// bindings and offsets are temporary storage for BindVertexBuffer.
	bindings []vk.Buffer
	offsets  []vk.DeviceSize

	desc struct {
		dirty    bool
		texBinds [texUnits]*Texture
		bufBinds [storageUnits]*Buffer
	}

	caps driver.Features
}

type passKey struct {
	fmt         vk.Format
	loadAct     vk.AttachmentLoadOp
	initLayout  vk.ImageLayout
	finalLayout vk.ImageLayout
}

type Texture struct {
	backend    *Backend
	img        vk.Image
	mem        vk.DeviceMemory
	view       vk.ImageView
	sampler    vk.Sampler
	fbo        vk.Framebuffer
	format     vk.Format
	mipmaps    int
	layout     vk.ImageLayout
	passLayout vk.ImageLayout
	width      int
	height     int
	acquire    vk.Semaphore
	foreign    bool

	scope struct {
		stage  vk.PipelineStageFlags
		access vk.AccessFlags
	}
}

type Shader struct {
	dev       vk.Device
	module    vk.ShaderModule
	pushRange vk.PushConstantRange
	src       shader.Sources
}

type Pipeline struct {
	backend    *Backend
	pipe       vk.Pipeline
	pushRanges []vk.PushConstantRange
	ninputs    int
	desc       *descPool
}

type descPool struct {
	layout     vk.PipelineLayout
	descLayout vk.DescriptorSetLayout
	pool       vk.DescriptorPool
	sets       []vk.DescriptorSet
	size       int
	texBinds   []int
	imgBinds   []int
	bufBinds   []int
}

type Buffer struct {
	backend *Backend
	buf     vk.Buffer
	store   []byte
	mem     vk.DeviceMemory
	usage   vk.BufferUsageFlags

	scope struct {
		stage  vk.PipelineStageFlags
		access vk.AccessFlags
	}
}

const (
	texUnits     = 4
	storageUnits = 4
)

func init() {
	driver.NewVulkanDevice = newVulkanDevice
}
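
// A hedged usage sketch (not part of this package): the window system glue
// fills in driver.Vulkan and obtains a device through the hook registered in
// init above. Field names follow their use in newVulkanDevice below.
//
//	var api driver.Vulkan // PhysDevice, Device, Format, QueueFamily, QueueIndex
//	dev, err := driver.NewVulkanDevice(api)
//	if err != nil {
//		// handle the error
//	}
//	defer dev.Release()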

func newVulkanDevice(api driver.Vulkan) (driver.Device, error) {
	b := &Backend{
		physDev:   vk.PhysicalDevice(api.PhysDevice),
		dev:       vk.Device(api.Device),
		outFormat: vk.Format(api.Format),
		caps:      driver.FeatureCompute,
		passes:    make(map[passKey]vk.RenderPass),
	}
	b.queue = vk.GetDeviceQueue(b.dev, api.QueueFamily, api.QueueIndex)
	cmdPool, err := vk.CreateCommandPool(b.dev, api.QueueFamily)
	if err != nil {
		return nil, err
	}
	b.cmdPool.pool = cmdPool
	props := vk.GetPhysicalDeviceFormatProperties(b.physDev, vk.FORMAT_R16_SFLOAT)
	reqs := vk.FORMAT_FEATURE_COLOR_ATTACHMENT_BIT | vk.FORMAT_FEATURE_SAMPLED_IMAGE_BIT
	if props&reqs == reqs {
		b.caps |= driver.FeatureFloatRenderTargets
	}
	reqs = vk.FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT | vk.FORMAT_FEATURE_SAMPLED_IMAGE_BIT | vk.FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT
	props = vk.GetPhysicalDeviceFormatProperties(b.physDev, vk.FORMAT_R8G8B8A8_SRGB)
	if props&reqs == reqs {
		b.caps |= driver.FeatureSRGB
	}
	fence, err := vk.CreateFence(b.dev, 0)
	if err != nil {
		return nil, mapErr(err)
	}
	b.fence = fence
	return b, nil
}

func (b *Backend) BeginFrame(target driver.RenderTarget, clear bool, viewport image.Point) driver.Texture {
	b.staging.size = 0
	b.cmdPool.used = 0
	b.runDefers()
	b.resetPipes()

	if target == nil {
		return nil
	}
	switch t := target.(type) {
	case driver.VulkanRenderTarget:
		layout := vk.IMAGE_LAYOUT_UNDEFINED
		if !clear {
			layout = vk.IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
		}
		b.frameSig = vk.Semaphore(t.SignalSem)
		b.frameFence = vk.Fence(t.Fence)
		tex := &Texture{
			img:        vk.Image(t.Image),
			fbo:        vk.Framebuffer(t.Framebuffer),
			width:      viewport.X,
			height:     viewport.Y,
			layout:     layout,
			passLayout: vk.IMAGE_LAYOUT_PRESENT_SRC_KHR,
			format:     b.outFormat,
			acquire:    vk.Semaphore(t.WaitSem),
			foreign:    true,
		}
		return tex
	case *Texture:
		return t
	default:
		panic(fmt.Sprintf("vulkan: unsupported render target type: %T", t))
	}
}

func (b *Backend) deferFunc(f func(d vk.Device)) {
	b.defers = append(b.defers, f)
}

func (b *Backend) runDefers() {
	for _, f := range b.defers {
		f(b.dev)
	}
	b.defers = b.defers[:0]
}
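
// Releasing resources follows a deferral pattern throughout this file: Release
// methods capture the Vulkan handles by value and queue destruction with
// deferFunc; runDefers then frees them at the start of the next frame (or in
// Backend.Release, after DeviceWaitIdle). For example, Buffer.Release below
// amounts to:
//
//	b.backend.deferFunc(func(d vk.Device) {
//		vk.DestroyBuffer(d, freeb.buf)
//		vk.FreeMemory(d, freeb.mem)
//	})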

func (b *Backend) resetPipes() {
	for i := len(b.allPipes) - 1; i >= 0; i-- {
		p := b.allPipes[i]
		if p.pipe == 0 {
			// Released pipeline.
			b.allPipes = append(b.allPipes[:i], b.allPipes[i+1:]...)
			continue
		}
		if p.desc.size > 0 {
			p.desc.size = 0
		}
	}
}

func (b *Backend) EndFrame() {
	if b.frameSig != 0 {
		b.sigSems = append(b.sigSems, b.frameSig)
		b.frameSig = 0
	}
	fence := b.frameFence
	if fence == 0 {
		// We're internally synchronized.
		fence = b.fence
	}
	b.submitCmdBuf(fence)
	if b.frameFence == 0 {
		vk.WaitForFences(b.dev, fence)
		vk.ResetFences(b.dev, fence)
	}
}

func (b *Backend) Caps() driver.Caps {
	return driver.Caps{
		MaxTextureSize: 4096,
		Features:       b.caps,
	}
}

func (b *Backend) NewTimer() driver.Timer {
	panic("timers not supported")
}

func (b *Backend) IsTimeContinuous() bool {
	panic("timers not supported")
}

func (b *Backend) Release() {
	vk.DeviceWaitIdle(b.dev)
	if buf := b.staging.buf; buf != nil {
		vk.UnmapMemory(b.dev, b.staging.buf.mem)
		buf.Release()
	}
	b.runDefers()
	for _, rp := range b.passes {
		vk.DestroyRenderPass(b.dev, rp)
	}
	vk.DestroyFence(b.dev, b.fence)
	vk.FreeCommandBuffers(b.dev, b.cmdPool.pool, b.cmdPool.buffers...)
	vk.DestroyCommandPool(b.dev, b.cmdPool.pool)
	*b = Backend{}
}

func (b *Backend) NewTexture(format driver.TextureFormat, width, height int, minFilter, magFilter driver.TextureFilter, bindings driver.BufferBinding) (driver.Texture, error) {
	vkfmt := formatFor(format)
	usage := vk.IMAGE_USAGE_TRANSFER_DST_BIT | vk.IMAGE_USAGE_TRANSFER_SRC_BIT
	passLayout := vk.IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
	if bindings&driver.BufferBindingTexture != 0 {
		usage |= vk.IMAGE_USAGE_SAMPLED_BIT
		passLayout = vk.IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
	}
	if bindings&driver.BufferBindingFramebuffer != 0 {
		usage |= vk.IMAGE_USAGE_COLOR_ATTACHMENT_BIT
	}
	if bindings&(driver.BufferBindingShaderStorageRead|driver.BufferBindingShaderStorageWrite) != 0 {
		usage |= vk.IMAGE_USAGE_STORAGE_BIT
	}
	filterFor := func(f driver.TextureFilter) vk.Filter {
		switch f {
		case driver.FilterLinear, driver.FilterLinearMipmapLinear:
			return vk.FILTER_LINEAR
		case driver.FilterNearest:
			return vk.FILTER_NEAREST
		}
		panic("unknown filter")
	}
	mipmapMode := vk.SAMPLER_MIPMAP_MODE_NEAREST
	mipmap := minFilter == driver.FilterLinearMipmapLinear
	nmipmaps := 1
	if mipmap {
		mipmapMode = vk.SAMPLER_MIPMAP_MODE_LINEAR
		dim := width
		if height > dim {
			dim = height
		}
		log2 := 32 - bits.LeadingZeros32(uint32(dim)) - 1
		nmipmaps = log2 + 1
	}
	sampler, err := vk.CreateSampler(b.dev, filterFor(minFilter), filterFor(magFilter), mipmapMode)
	if err != nil {
		return nil, mapErr(err)
	}
	img, mem, err := vk.CreateImage(b.physDev, b.dev, vkfmt, width, height, nmipmaps, usage)
	if err != nil {
		vk.DestroySampler(b.dev, sampler)
		return nil, mapErr(err)
	}
	view, err := vk.CreateImageView(b.dev, img, vkfmt)
	if err != nil {
		vk.DestroySampler(b.dev, sampler)
		vk.DestroyImage(b.dev, img)
		vk.FreeMemory(b.dev, mem)
		return nil, mapErr(err)
	}
	t := &Texture{backend: b, img: img, mem: mem, view: view, sampler: sampler, layout: vk.IMAGE_LAYOUT_UNDEFINED, passLayout: passLayout, width: width, height: height, format: vkfmt, mipmaps: nmipmaps}
	if bindings&driver.BufferBindingFramebuffer != 0 {
		pass, err := vk.CreateRenderPass(b.dev, vkfmt, vk.ATTACHMENT_LOAD_OP_DONT_CARE,
			vk.IMAGE_LAYOUT_UNDEFINED, vk.IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, nil)
		if err != nil {
			return nil, mapErr(err)
		}
		defer vk.DestroyRenderPass(b.dev, pass)
		fbo, err := vk.CreateFramebuffer(b.dev, pass, view, width, height)
		if err != nil {
			return nil, mapErr(err)
		}
		t.fbo = fbo
	}
	return t, nil
}
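
// Worked example of the mip count above (illustrative numbers): for a 300×200
// texture with minFilter FilterLinearMipmapLinear, dim = 300,
// log2 = 32 - LeadingZeros32(300) - 1 = 8, so nmipmaps = 9, matching the chain
// 300×200, 150×100, …, 1×1 produced by repeated halving.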

func (b *Backend) NewBuffer(bindings driver.BufferBinding, size int) (driver.Buffer, error) {
	if bindings&driver.BufferBindingUniforms != 0 {
		// Implement uniform buffers as inline push constants.
		return &Buffer{store: make([]byte, size)}, nil
	}
	usage := vk.BUFFER_USAGE_TRANSFER_DST_BIT | vk.BUFFER_USAGE_TRANSFER_SRC_BIT
	if bindings&driver.BufferBindingIndices != 0 {
		usage |= vk.BUFFER_USAGE_INDEX_BUFFER_BIT
	}
	if bindings&(driver.BufferBindingShaderStorageRead|driver.BufferBindingShaderStorageWrite) != 0 {
		usage |= vk.BUFFER_USAGE_STORAGE_BUFFER_BIT
	}
	if bindings&driver.BufferBindingVertices != 0 {
		usage |= vk.BUFFER_USAGE_VERTEX_BUFFER_BIT
	}
	buf, err := b.newBuffer(size, usage, vk.MEMORY_PROPERTY_DEVICE_LOCAL_BIT)
	return buf, mapErr(err)
}

func (b *Backend) newBuffer(size int, usage vk.BufferUsageFlags, props vk.MemoryPropertyFlags) (*Buffer, error) {
	buf, mem, err := vk.CreateBuffer(b.physDev, b.dev, size, usage, props)
	return &Buffer{backend: b, buf: buf, mem: mem, usage: usage}, err
}

func (b *Backend) NewImmutableBuffer(typ driver.BufferBinding, data []byte) (driver.Buffer, error) {
	buf, err := b.NewBuffer(typ, len(data))
	if err != nil {
		return nil, err
	}
	buf.Upload(data)
	return buf, nil
}

func (b *Backend) NewVertexShader(src shader.Sources) (driver.VertexShader, error) {
	sh, err := b.newShader(src, vk.SHADER_STAGE_VERTEX_BIT)
	return sh, mapErr(err)
}

func (b *Backend) NewFragmentShader(src shader.Sources) (driver.FragmentShader, error) {
	sh, err := b.newShader(src, vk.SHADER_STAGE_FRAGMENT_BIT)
	return sh, mapErr(err)
}

func (b *Backend) NewPipeline(desc driver.PipelineDesc) (driver.Pipeline, error) {
	vs := desc.VertexShader.(*Shader)
	fs := desc.FragmentShader.(*Shader)
	var ranges []vk.PushConstantRange
	if r := vs.pushRange; r != (vk.PushConstantRange{}) {
		ranges = append(ranges, r)
	}
	if r := fs.pushRange; r != (vk.PushConstantRange{}) {
		ranges = append(ranges, r)
	}
	descPool, err := createPipelineLayout(b.dev, fs.src, ranges)
	if err != nil {
		return nil, mapErr(err)
	}
	blend := desc.BlendDesc
	factorFor := func(f driver.BlendFactor) vk.BlendFactor {
		switch f {
		case driver.BlendFactorZero:
			return vk.BLEND_FACTOR_ZERO
		case driver.BlendFactorOne:
			return vk.BLEND_FACTOR_ONE
		case driver.BlendFactorOneMinusSrcAlpha:
			return vk.BLEND_FACTOR_ONE_MINUS_SRC_ALPHA
		case driver.BlendFactorDstColor:
			return vk.BLEND_FACTOR_DST_COLOR
		default:
			panic("unknown blend factor")
		}
	}
	var top vk.PrimitiveTopology
	switch desc.Topology {
	case driver.TopologyTriangles:
		top = vk.PRIMITIVE_TOPOLOGY_TRIANGLE_LIST
	case driver.TopologyTriangleStrip:
		top = vk.PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP
	default:
		panic("unknown topology")
	}
	var binds []vk.VertexInputBindingDescription
	var attrs []vk.VertexInputAttributeDescription
	inputs := desc.VertexLayout.Inputs
	for i, inp := range inputs {
		binds = append(binds, vk.VertexInputBindingDescription{
			Binding: i,
			Stride:  desc.VertexLayout.Stride,
		})
		attrs = append(attrs, vk.VertexInputAttributeDescription{
			Binding:  i,
			Location: vs.src.Inputs[i].Location,
			Format:   vertFormatFor(vs.src.Inputs[i]),
			Offset:   inp.Offset,
		})
	}
	fmt := b.outFormat
	if f := desc.PixelFormat; f != driver.TextureFormatOutput {
		fmt = formatFor(f)
	}
	pass, err := vk.CreateRenderPass(b.dev, fmt, vk.ATTACHMENT_LOAD_OP_DONT_CARE,
		vk.IMAGE_LAYOUT_UNDEFINED, vk.IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, nil)
	if err != nil {
		return nil, mapErr(err)
	}
	defer vk.DestroyRenderPass(b.dev, pass)
	pipe, err := vk.CreateGraphicsPipeline(b.dev, pass, vs.module, fs.module, blend.Enable, factorFor(blend.SrcFactor), factorFor(blend.DstFactor), top, binds, attrs, descPool.layout)
	if err != nil {
		descPool.release(b.dev)
		return nil, mapErr(err)
	}
	p := &Pipeline{backend: b, pipe: pipe, desc: descPool, pushRanges: ranges, ninputs: len(inputs)}
	b.allPipes = append(b.allPipes, p)
	return p, nil
}

func (b *Backend) NewComputeProgram(src shader.Sources) (driver.Program, error) {
	sh, err := b.newShader(src, vk.SHADER_STAGE_COMPUTE_BIT)
	if err != nil {
		return nil, mapErr(err)
	}
	defer sh.Release()
	descPool, err := createPipelineLayout(b.dev, src, nil)
	if err != nil {
		return nil, mapErr(err)
	}
	pipe, err := vk.CreateComputePipeline(b.dev, sh.module, descPool.layout)
	if err != nil {
		descPool.release(b.dev)
		return nil, mapErr(err)
	}
	return &Pipeline{backend: b, pipe: pipe, desc: descPool}, nil
}

func vertFormatFor(f shader.InputLocation) vk.Format {
	t := f.Type
	s := f.Size
	switch {
	case t == shader.DataTypeFloat && s == 1:
		return vk.FORMAT_R32_SFLOAT
	case t == shader.DataTypeFloat && s == 2:
		return vk.FORMAT_R32G32_SFLOAT
	case t == shader.DataTypeFloat && s == 3:
		return vk.FORMAT_R32G32B32_SFLOAT
	case t == shader.DataTypeFloat && s == 4:
		return vk.FORMAT_R32G32B32A32_SFLOAT
	default:
		panic("unsupported data type")
	}
}

func createPipelineLayout(d vk.Device, src shader.Sources, ranges []vk.PushConstantRange) (*descPool, error) {
	var (
		descLayouts []vk.DescriptorSetLayout
		descLayout  vk.DescriptorSetLayout
	)
	texBinds := make([]int, len(src.Textures))
	imgBinds := make([]int, len(src.Images))
	bufBinds := make([]int, len(src.StorageBuffers))
	var descBinds []vk.DescriptorSetLayoutBinding
	for i, t := range src.Textures {
		descBinds = append(descBinds, vk.DescriptorSetLayoutBinding{
			Binding:        t.Binding,
			StageFlags:     vk.SHADER_STAGE_FRAGMENT_BIT,
			DescriptorType: vk.DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
		})
		texBinds[i] = t.Binding
	}
	for i, img := range src.Images {
		descBinds = append(descBinds, vk.DescriptorSetLayoutBinding{
			Binding:        img.Binding,
			StageFlags:     vk.SHADER_STAGE_COMPUTE_BIT,
			DescriptorType: vk.DESCRIPTOR_TYPE_STORAGE_IMAGE,
		})
		imgBinds[i] = img.Binding
	}
	for i, buf := range src.StorageBuffers {
		descBinds = append(descBinds, vk.DescriptorSetLayoutBinding{
			Binding:        buf.Binding,
			StageFlags:     vk.SHADER_STAGE_COMPUTE_BIT,
			DescriptorType: vk.DESCRIPTOR_TYPE_STORAGE_BUFFER,
		})
		bufBinds[i] = buf.Binding
	}
	if len(descBinds) > 0 {
		var err error
		descLayout, err = vk.CreateDescriptorSetLayout(d, descBinds)
		if err != nil {
			return nil, err
		}
		descLayouts = append(descLayouts, descLayout)
	}
	layout, err := vk.CreatePipelineLayout(d, ranges, descLayouts)
	if err != nil {
		if descLayout != 0 {
			vk.DestroyDescriptorSetLayout(d, descLayout)
		}
		return nil, err
	}
	descPool := &descPool{
		texBinds:   texBinds,
		bufBinds:   bufBinds,
		imgBinds:   imgBinds,
		layout:     layout,
		descLayout: descLayout,
	}
	return descPool, nil
}

func (b *Backend) newShader(src shader.Sources, stage vk.ShaderStageFlags) (*Shader, error) {
	mod, err := vk.CreateShaderModule(b.dev, src.SPIRV)
	if err != nil {
		return nil, err
	}

	sh := &Shader{dev: b.dev, module: mod, src: src}
	if locs := src.Uniforms.Locations; len(locs) > 0 {
		pushOffset := 0x7fffffff
		for _, l := range locs {
			if l.Offset < pushOffset {
				pushOffset = l.Offset
			}
		}
		sh.pushRange = vk.BuildPushConstantRange(stage, pushOffset, src.Uniforms.Size)
	}
	return sh, nil
}
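
// Push constant example (illustrative numbers): for uniform locations at
// offsets 16 and 32 with src.Uniforms.Size = 48, the range starts at the
// smallest offset, 16, and spans 48 bytes for the given shader stage.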

func (b *Backend) CopyTexture(dstTex driver.Texture, dorig image.Point, srcFBO driver.Texture, srect image.Rectangle) {
	dst := dstTex.(*Texture)
	src := srcFBO.(*Texture)
	cmdBuf := b.ensureCmdBuf()
	op := vk.BuildImageCopy(srect.Min.X, srect.Min.Y, dorig.X, dorig.Y, srect.Dx(), srect.Dy())
	src.imageBarrier(cmdBuf,
		vk.IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
		vk.PIPELINE_STAGE_TRANSFER_BIT,
		vk.ACCESS_TRANSFER_READ_BIT,
	)
	dst.imageBarrier(cmdBuf,
		vk.IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
		vk.PIPELINE_STAGE_TRANSFER_BIT,
		vk.ACCESS_TRANSFER_WRITE_BIT,
	)
	vk.CmdCopyImage(cmdBuf, src.img, src.layout, dst.img, dst.layout, []vk.ImageCopy{op})
}

func (b *Backend) Viewport(x, y, width, height int) {
	cmdBuf := b.currentCmdBuf()
	vp := vk.BuildViewport(float32(x), float32(y), float32(width), float32(height))
	vk.CmdSetViewport(cmdBuf, 0, vp)
}

func (b *Backend) DrawArrays(off, count int) {
	cmdBuf := b.currentCmdBuf()
	if b.desc.dirty {
		b.pipe.desc.bindDescriptorSet(b, cmdBuf, vk.PIPELINE_BIND_POINT_GRAPHICS, b.desc.texBinds, b.desc.bufBinds)
		b.desc.dirty = false
	}
	vk.CmdDraw(cmdBuf, count, 1, off, 0)
}

func (b *Backend) DrawElements(off, count int) {
	cmdBuf := b.currentCmdBuf()
	if b.desc.dirty {
		b.pipe.desc.bindDescriptorSet(b, cmdBuf, vk.PIPELINE_BIND_POINT_GRAPHICS, b.desc.texBinds, b.desc.bufBinds)
		b.desc.dirty = false
	}
	vk.CmdDrawIndexed(cmdBuf, count, 1, off, 0, 0)
}

func (b *Backend) BindImageTexture(unit int, tex driver.Texture) {
	t := tex.(*Texture)
	b.desc.texBinds[unit] = t
	b.desc.dirty = true
	t.imageBarrier(b.currentCmdBuf(),
		vk.IMAGE_LAYOUT_GENERAL,
		vk.PIPELINE_STAGE_COMPUTE_SHADER_BIT,
		vk.ACCESS_SHADER_READ_BIT|vk.ACCESS_SHADER_WRITE_BIT,
	)
}

func (b *Backend) DispatchCompute(x, y, z int) {
	cmdBuf := b.currentCmdBuf()
	if b.desc.dirty {
		b.pipe.desc.bindDescriptorSet(b, cmdBuf, vk.PIPELINE_BIND_POINT_COMPUTE, b.desc.texBinds, b.desc.bufBinds)
		b.desc.dirty = false
	}
	vk.CmdDispatch(cmdBuf, x, y, z)
}

func (t *Texture) Upload(offset, size image.Point, pixels []byte, stride int) {
	if stride == 0 {
		stride = size.X * 4
	}
	cmdBuf := t.backend.ensureCmdBuf()
	dstStride := size.X * 4
	n := size.Y * dstStride
	stage, mem, off := t.backend.stagingBuffer(n)
	var srcOff, dstOff int
	for y := 0; y < size.Y; y++ {
		srcRow := pixels[srcOff : srcOff+dstStride]
		dstRow := mem[dstOff : dstOff+dstStride]
		copy(dstRow, srcRow)
		dstOff += dstStride
		srcOff += stride
	}
	op := vk.BuildBufferImageCopy(off, dstStride/4, offset.X, offset.Y, size.X, size.Y)
	t.imageBarrier(cmdBuf,
		vk.IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
		vk.PIPELINE_STAGE_TRANSFER_BIT,
		vk.ACCESS_TRANSFER_WRITE_BIT,
	)
	vk.CmdCopyBufferToImage(cmdBuf, stage.buf, t.img, t.layout, op)
	// Build mipmaps by repeating linear blits.
	w, h := t.width, t.height
	for i := 1; i < t.mipmaps; i++ {
		nw, nh := w/2, h/2
		if nh < 1 {
			nh = 1
		}
		if nw < 1 {
			nw = 1
		}
		// Transition previous (source) level.
		b := vk.BuildImageMemoryBarrier(
			t.img,
			vk.ACCESS_TRANSFER_WRITE_BIT, vk.ACCESS_TRANSFER_READ_BIT,
			vk.IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, vk.IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
			i-1, 1,
		)
		vk.CmdPipelineBarrier(cmdBuf, vk.PIPELINE_STAGE_TRANSFER_BIT, vk.PIPELINE_STAGE_TRANSFER_BIT, vk.DEPENDENCY_BY_REGION_BIT, nil, nil, []vk.ImageMemoryBarrier{b})
		// Blit to this mipmap level.
		blit := vk.BuildImageBlit(0, 0, 0, 0, w, h, nw, nh, i-1, i)
		vk.CmdBlitImage(cmdBuf, t.img, vk.IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, t.img, vk.IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, []vk.ImageBlit{blit}, vk.FILTER_LINEAR)
		w, h = nw, nh
	}
	if t.mipmaps > 1 {
		// Add barrier for last blit.
		b := vk.BuildImageMemoryBarrier(
			t.img,
			vk.ACCESS_TRANSFER_WRITE_BIT, vk.ACCESS_TRANSFER_READ_BIT,
			vk.IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, vk.IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
			t.mipmaps-1, 1,
		)
		vk.CmdPipelineBarrier(cmdBuf, vk.PIPELINE_STAGE_TRANSFER_BIT, vk.PIPELINE_STAGE_TRANSFER_BIT, vk.DEPENDENCY_BY_REGION_BIT, nil, nil, []vk.ImageMemoryBarrier{b})
		t.layout = vk.IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
	}
}
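
// Mip chain walkthrough (illustrative): for a 16×16 texture with 5 mip levels,
// level 0 is filled from the staging buffer, then the loop blits
// 16×16→8×8→4×4→2×2→1×1, transitioning each source level to
// TRANSFER_SRC_OPTIMAL before its blit. The final barrier transitions the last
// level too, so t.layout ends as IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL.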

func (t *Texture) Release() {
	if t.foreign {
		panic("external textures cannot be released")
	}
	freet := *t
	t.backend.deferFunc(func(d vk.Device) {
		if freet.fbo != 0 {
			vk.DestroyFramebuffer(d, freet.fbo)
		}
		vk.DestroySampler(d, freet.sampler)
		vk.DestroyImageView(d, freet.view)
		vk.DestroyImage(d, freet.img)
		vk.FreeMemory(d, freet.mem)
	})
	*t = Texture{}
}

func (p *Pipeline) Release() {
	freep := *p
	p.backend.deferFunc(func(d vk.Device) {
		freep.desc.release(d)
		vk.DestroyPipeline(d, freep.pipe)
	})
	*p = Pipeline{}
}

func (p *descPool) release(d vk.Device) {
	if p := p.pool; p != 0 {
		vk.DestroyDescriptorPool(d, p)
	}
	if l := p.descLayout; l != 0 {
		vk.DestroyDescriptorSetLayout(d, l)
	}
	vk.DestroyPipelineLayout(d, p.layout)
}

func (p *descPool) bindDescriptorSet(b *Backend, cmdBuf vk.CommandBuffer, bindPoint vk.PipelineBindPoint, texBinds [texUnits]*Texture, bufBinds [storageUnits]*Buffer) {
	if p.size == len(p.sets) {
		l := p.descLayout
		if l == 0 {
			panic("vulkan: descriptor set is dirty, but pipeline has empty layout")
		}
		newCap := len(p.sets) * 2
		if pool := p.pool; pool != 0 {
			b.deferFunc(func(d vk.Device) {
				vk.DestroyDescriptorPool(d, pool)
			})
		}
		const initialPoolSize = 100
		if newCap < initialPoolSize {
			newCap = initialPoolSize
		}
		var poolSizes []vk.DescriptorPoolSize
		if n := len(p.texBinds); n > 0 {
			poolSizes = append(poolSizes, vk.BuildDescriptorPoolSize(vk.DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, newCap*n))
		}
		if n := len(p.imgBinds); n > 0 {
			poolSizes = append(poolSizes, vk.BuildDescriptorPoolSize(vk.DESCRIPTOR_TYPE_STORAGE_IMAGE, newCap*n))
		}
		if n := len(p.bufBinds); n > 0 {
			poolSizes = append(poolSizes, vk.BuildDescriptorPoolSize(vk.DESCRIPTOR_TYPE_STORAGE_BUFFER, newCap*n))
		}
		pool, err := vk.CreateDescriptorPool(b.dev, newCap, poolSizes)
		if err != nil {
			panic(fmt.Errorf("vulkan: failed to allocate descriptor pool with %d descriptors: %v", newCap, err))
		}
		p.pool = pool
		sets, err := vk.AllocateDescriptorSets(b.dev, p.pool, l, newCap)
		if err != nil {
			panic(fmt.Errorf("vulkan: failed to allocate %d descriptor sets: %v", newCap, err))
		}
		p.sets = sets
		p.size = 0
	}
	descSet := p.sets[p.size]
	p.size++
	for _, bind := range p.texBinds {
		tex := texBinds[bind]
		write := vk.BuildWriteDescriptorSetImage(descSet, bind, vk.DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, tex.sampler, tex.view, vk.IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
		vk.UpdateDescriptorSet(b.dev, write)
	}
	for _, bind := range p.imgBinds {
		tex := texBinds[bind]
		write := vk.BuildWriteDescriptorSetImage(descSet, bind, vk.DESCRIPTOR_TYPE_STORAGE_IMAGE, 0, tex.view, vk.IMAGE_LAYOUT_GENERAL)
		vk.UpdateDescriptorSet(b.dev, write)
	}
	for _, bind := range p.bufBinds {
		buf := bufBinds[bind]
		write := vk.BuildWriteDescriptorSetBuffer(descSet, bind, vk.DESCRIPTOR_TYPE_STORAGE_BUFFER, buf.buf)
		vk.UpdateDescriptorSet(b.dev, write)
	}
	vk.CmdBindDescriptorSets(cmdBuf, bindPoint, p.layout, 0, []vk.DescriptorSet{descSet})
}
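
// Pool growth example (numbers follow the code above): the first bind in a
// pipeline's lifetime allocates a pool of initialPoolSize (100) sets; if a
// frame exhausts all 100, the old pool is queued for destruction and a new one
// of len(p.sets)*2 = 200 sets replaces it, and so on. resetPipes rewinds
// p.size each frame, so sets are reused rather than reallocated.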

func (t *Texture) imageBarrier(cmdBuf vk.CommandBuffer, layout vk.ImageLayout, stage vk.PipelineStageFlags, access vk.AccessFlags) {
	srcStage := t.scope.stage
	if srcStage == 0 && t.layout == layout {
		t.scope.stage = stage
		t.scope.access = access
		return
	}
	if srcStage == 0 {
		srcStage = vk.PIPELINE_STAGE_TOP_OF_PIPE_BIT
	}
	b := vk.BuildImageMemoryBarrier(
		t.img,
		t.scope.access, access,
		t.layout, layout,
		0, vk.REMAINING_MIP_LEVELS,
	)
	vk.CmdPipelineBarrier(cmdBuf, srcStage, stage, vk.DEPENDENCY_BY_REGION_BIT, nil, nil, []vk.ImageMemoryBarrier{b})
	t.layout = layout
	t.scope.stage = stage
	t.scope.access = access
}
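
// Note the fast path above: when no prior scope exists and the layout already
// matches, imageBarrier only records the new stage and access scope instead of
// emitting vkCmdPipelineBarrier, so repeated reads in the same layout stay
// barrier-free.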

func (b *Backend) PrepareTexture(tex driver.Texture) {
	t := tex.(*Texture)
	cmdBuf := b.ensureCmdBuf()
	t.imageBarrier(cmdBuf,
		vk.IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
		vk.PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
		vk.ACCESS_SHADER_READ_BIT,
	)
}

func (b *Backend) BindTexture(unit int, tex driver.Texture) {
	t := tex.(*Texture)
	b.desc.texBinds[unit] = t
	b.desc.dirty = true
}

func (b *Backend) BindPipeline(pipe driver.Pipeline) {
	b.bindPipeline(pipe.(*Pipeline), vk.PIPELINE_BIND_POINT_GRAPHICS)
}

func (b *Backend) BindProgram(prog driver.Program) {
	b.bindPipeline(prog.(*Pipeline), vk.PIPELINE_BIND_POINT_COMPUTE)
}

func (b *Backend) bindPipeline(p *Pipeline, point vk.PipelineBindPoint) {
	b.pipe = p
	b.desc.dirty = p.desc.descLayout != 0
	cmdBuf := b.currentCmdBuf()
	vk.CmdBindPipeline(cmdBuf, point, p.pipe)
}

func (s *Shader) Release() {
	vk.DestroyShaderModule(s.dev, s.module)
	*s = Shader{}
}

func (b *Backend) BindStorageBuffer(binding int, buffer driver.Buffer) {
	buf := buffer.(*Buffer)
	b.desc.bufBinds[binding] = buf
	b.desc.dirty = true
	buf.barrier(b.currentCmdBuf(),
		vk.PIPELINE_STAGE_COMPUTE_SHADER_BIT,
		vk.ACCESS_SHADER_READ_BIT|vk.ACCESS_SHADER_WRITE_BIT,
	)
}

func (b *Backend) BindUniforms(buffer driver.Buffer) {
	buf := buffer.(*Buffer)
	cmdBuf := b.currentCmdBuf()
	for _, s := range b.pipe.pushRanges {
		off := s.Offset()
		vk.CmdPushConstants(cmdBuf, b.pipe.desc.layout, s.StageFlags(), off, buf.store[off:off+s.Size()])
	}
}

func (b *Backend) BindVertexBuffer(buffer driver.Buffer, offset int) {
	buf := buffer.(*Buffer)
	cmdBuf := b.currentCmdBuf()
	b.bindings = b.bindings[:0]
	b.offsets = b.offsets[:0]
	for i := 0; i < b.pipe.ninputs; i++ {
		b.bindings = append(b.bindings, buf.buf)
		b.offsets = append(b.offsets, vk.DeviceSize(offset))
	}
	vk.CmdBindVertexBuffers(cmdBuf, 0, b.bindings, b.offsets)
}

func (b *Backend) BindIndexBuffer(buffer driver.Buffer) {
	buf := buffer.(*Buffer)
	cmdBuf := b.currentCmdBuf()
	vk.CmdBindIndexBuffer(cmdBuf, buf.buf, 0, vk.INDEX_TYPE_UINT16)
}

func (b *Buffer) Download(data []byte) error {
	if b.buf == 0 {
		copy(data, b.store)
		return nil
	}
	stage, mem, off := b.backend.stagingBuffer(len(data))
	cmdBuf := b.backend.ensureCmdBuf()
	b.barrier(cmdBuf,
		vk.PIPELINE_STAGE_TRANSFER_BIT,
		vk.ACCESS_TRANSFER_READ_BIT,
	)
	vk.CmdCopyBuffer(cmdBuf, b.buf, stage.buf, 0, off, len(data))
	stage.scope.stage = vk.PIPELINE_STAGE_TRANSFER_BIT
	stage.scope.access = vk.ACCESS_TRANSFER_WRITE_BIT
	stage.barrier(cmdBuf,
		vk.PIPELINE_STAGE_HOST_BIT,
		vk.ACCESS_HOST_READ_BIT,
	)
	b.backend.submitCmdBuf(b.backend.fence)
	vk.WaitForFences(b.backend.dev, b.backend.fence)
	vk.ResetFences(b.backend.dev, b.backend.fence)
	copy(data, mem)
	return nil
}

func (b *Buffer) Upload(data []byte) {
	if b.buf == 0 {
		copy(b.store, data)
		return
	}
	stage, mem, off := b.backend.stagingBuffer(len(data))
	copy(mem, data)
	cmdBuf := b.backend.ensureCmdBuf()
	b.barrier(cmdBuf,
		vk.PIPELINE_STAGE_TRANSFER_BIT,
		vk.ACCESS_TRANSFER_WRITE_BIT,
	)
	vk.CmdCopyBuffer(cmdBuf, stage.buf, b.buf, off, 0, len(data))
	var access vk.AccessFlags
	if b.usage&vk.BUFFER_USAGE_INDEX_BUFFER_BIT != 0 {
		access |= vk.ACCESS_INDEX_READ_BIT
	}
	if b.usage&vk.BUFFER_USAGE_VERTEX_BUFFER_BIT != 0 {
		access |= vk.ACCESS_VERTEX_ATTRIBUTE_READ_BIT
	}
	if access != 0 {
		b.barrier(cmdBuf,
			vk.PIPELINE_STAGE_VERTEX_INPUT_BIT,
			access,
		)
	}
}

func (b *Buffer) barrier(cmdBuf vk.CommandBuffer, stage vk.PipelineStageFlags, access vk.AccessFlags) {
	srcStage := b.scope.stage
	if srcStage == 0 {
		b.scope.stage = stage
		b.scope.access = access
		return
	}
	barrier := vk.BuildBufferMemoryBarrier(
		b.buf,
		b.scope.access, access,
	)
	vk.CmdPipelineBarrier(cmdBuf, srcStage, stage, vk.DEPENDENCY_BY_REGION_BIT, nil, []vk.BufferMemoryBarrier{barrier}, nil)
	b.scope.stage = stage
	b.scope.access = access
}

func (b *Buffer) Release() {
	freeb := *b
	if freeb.buf != 0 {
		b.backend.deferFunc(func(d vk.Device) {
			vk.DestroyBuffer(d, freeb.buf)
			vk.FreeMemory(d, freeb.mem)
		})
	}
	*b = Buffer{}
}

func (t *Texture) ReadPixels(src image.Rectangle, pixels []byte, stride int) error {
	if len(pixels) == 0 {
		return nil
	}
	sz := src.Size()
	stageStride := sz.X * 4
	n := sz.Y * stageStride
	stage, mem, off := t.backend.stagingBuffer(n)
	cmdBuf := t.backend.ensureCmdBuf()
	region := vk.BuildBufferImageCopy(off, stageStride/4, src.Min.X, src.Min.Y, sz.X, sz.Y)
	t.imageBarrier(cmdBuf,
		vk.IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
		vk.PIPELINE_STAGE_TRANSFER_BIT,
		vk.ACCESS_TRANSFER_READ_BIT,
	)
	vk.CmdCopyImageToBuffer(cmdBuf, t.img, t.layout, stage.buf, []vk.BufferImageCopy{region})
	stage.scope.stage = vk.PIPELINE_STAGE_TRANSFER_BIT
	stage.scope.access = vk.ACCESS_TRANSFER_WRITE_BIT
	stage.barrier(cmdBuf,
		vk.PIPELINE_STAGE_HOST_BIT,
		vk.ACCESS_HOST_READ_BIT,
	)
	t.backend.submitCmdBuf(t.backend.fence)
	vk.WaitForFences(t.backend.dev, t.backend.fence)
	vk.ResetFences(t.backend.dev, t.backend.fence)
	var srcOff, dstOff int
	for y := 0; y < sz.Y; y++ {
		dstRow := pixels[dstOff : dstOff+stageStride]
		srcRow := mem[srcOff : srcOff+stageStride]
		copy(dstRow, srcRow)
		srcOff += stageStride
		dstOff += stride
	}
	return nil
}

func (b *Backend) currentCmdBuf() vk.CommandBuffer {
	cur := b.cmdPool.current
	if cur == nil {
		panic("vulkan: invalid operation outside a render or compute pass")
	}
	return cur
}

func (b *Backend) ensureCmdBuf() vk.CommandBuffer {
	if b.cmdPool.current != nil {
		return b.cmdPool.current
	}
	if b.cmdPool.used < len(b.cmdPool.buffers) {
		buf := b.cmdPool.buffers[b.cmdPool.used]
		b.cmdPool.current = buf
	} else {
		buf, err := vk.AllocateCommandBuffer(b.dev, b.cmdPool.pool)
		if err != nil {
			panic(err)
		}
		b.cmdPool.buffers = append(b.cmdPool.buffers, buf)
		b.cmdPool.current = buf
	}
	b.cmdPool.used++
	buf := b.cmdPool.current
	if err := vk.BeginCommandBuffer(buf); err != nil {
		panic(err)
	}
	return buf
}
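
// Command buffers are recycled rather than freed: BeginFrame resets
// cmdPool.used to zero, so ensureCmdBuf first reuses buffers recorded in
// earlier frames and only allocates a fresh one once the pool runs dry.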

func (b *Backend) BeginRenderPass(tex driver.Texture, d driver.LoadDesc) {
	t := tex.(*Texture)
	var vkop vk.AttachmentLoadOp
	switch d.Action {
	case driver.LoadActionClear:
		vkop = vk.ATTACHMENT_LOAD_OP_CLEAR
	case driver.LoadActionInvalidate:
		vkop = vk.ATTACHMENT_LOAD_OP_DONT_CARE
	case driver.LoadActionKeep:
		vkop = vk.ATTACHMENT_LOAD_OP_LOAD
	}
	cmdBuf := b.ensureCmdBuf()
	if sem := t.acquire; sem != 0 {
		// The render pass targets a framebuffer that has an associated acquire semaphore.
		// Wait for it by forming an execution barrier.
		b.waitSems = append(b.waitSems, sem)
		b.waitStages = append(b.waitStages, vk.PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT)
		// But only for the first pass in a frame.
		t.acquire = 0
	}
	t.imageBarrier(cmdBuf,
		vk.IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
		vk.PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
		vk.ACCESS_COLOR_ATTACHMENT_READ_BIT|vk.ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
	)
	pass := b.lookupPass(t.format, vkop, t.layout, t.passLayout)
	col := d.ClearColor
	vk.CmdBeginRenderPass(cmdBuf, pass, t.fbo, t.width, t.height, [4]float32{col.R, col.G, col.B, col.A})
	t.layout = t.passLayout
	// If the render pass describes an automatic image layout transition to its final layout, there
	// is an implicit image barrier with destination PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT. Make
	// sure any subsequent barrier includes the transition.
	// See also https://www.khronos.org/registry/vulkan/specs/1.0/html/vkspec.html#VkSubpassDependency.
	t.scope.stage |= vk.PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT
}

func (b *Backend) EndRenderPass() {
	vk.CmdEndRenderPass(b.cmdPool.current)
}

func (b *Backend) BeginCompute() {
	b.ensureCmdBuf()
}

func (b *Backend) EndCompute() {
}

func (b *Backend) lookupPass(fmt vk.Format, loadAct vk.AttachmentLoadOp, initLayout, finalLayout vk.ImageLayout) vk.RenderPass {
	key := passKey{fmt: fmt, loadAct: loadAct, initLayout: initLayout, finalLayout: finalLayout}
	if pass, ok := b.passes[key]; ok {
		return pass
	}
	pass, err := vk.CreateRenderPass(b.dev, fmt, loadAct, initLayout, finalLayout, nil)
	if err != nil {
		panic(err)
	}
	b.passes[key] = pass
	return pass
}

func (b *Backend) submitCmdBuf(fence vk.Fence) {
	buf := b.cmdPool.current
	if buf == nil && fence == 0 {
		return
	}
	buf = b.ensureCmdBuf()
	b.cmdPool.current = nil
	if err := vk.EndCommandBuffer(buf); err != nil {
		panic(err)
	}
	if err := vk.QueueSubmit(b.queue, buf, b.waitSems, b.waitStages, b.sigSems, fence); err != nil {
		panic(err)
	}
	b.waitSems = b.waitSems[:0]
	b.sigSems = b.sigSems[:0]
	b.waitStages = b.waitStages[:0]
}

func (b *Backend) stagingBuffer(size int) (*Buffer, []byte, int) {
	if b.staging.size+size > b.staging.cap {
		if b.staging.buf != nil {
			vk.UnmapMemory(b.dev, b.staging.buf.mem)
			b.staging.buf.Release()
			b.staging.cap = 0
		}
		cap := 2 * (b.staging.size + size)
		buf, err := b.newBuffer(cap, vk.BUFFER_USAGE_TRANSFER_SRC_BIT|vk.BUFFER_USAGE_TRANSFER_DST_BIT,
			vk.MEMORY_PROPERTY_HOST_VISIBLE_BIT|vk.MEMORY_PROPERTY_HOST_COHERENT_BIT)
		if err != nil {
			panic(err)
		}
		mem, err := vk.MapMemory(b.dev, buf.mem, 0, cap)
		if err != nil {
			buf.Release()
			panic(err)
		}
		b.staging.buf = buf
		b.staging.mem = mem
		b.staging.size = 0
		b.staging.cap = cap
	}
	off := b.staging.size
	b.staging.size += size
	mem := b.staging.mem[off : off+size]
	return b.staging.buf, mem, off
}
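
// Growth example (illustrative sizes): with staging.size = 3072 and
// staging.cap = 4096, a 2048-byte request overflows, so the old buffer is
// unmapped and queued for release (copies already recorded against it stay
// valid until the deferred destroy runs) and a new host-coherent buffer of
// 2*(3072+2048) = 10240 bytes is created and mapped; the request is then
// served at offset 0 of the new buffer.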

func formatFor(format driver.TextureFormat) vk.Format {
	switch format {
	case driver.TextureFormatRGBA8:
		return vk.FORMAT_R8G8B8A8_UNORM
	case driver.TextureFormatSRGBA:
		return vk.FORMAT_R8G8B8A8_SRGB
	case driver.TextureFormatFloat:
		return vk.FORMAT_R16_SFLOAT
	default:
		panic("unsupported texture format")
	}
}

func mapErr(err error) error {
	var vkErr vk.Error
	if errors.As(err, &vkErr) && vkErr == vk.ERROR_DEVICE_LOST {
		return driver.ErrDeviceLost
	}
	return err
}

func (t *Texture) ImplementsRenderTarget() {}